1# -*- mode:python -*-
2
3# Copyright (c) 2013 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
    # Really old versions of scons only take two arguments for the
    # function, so check once without the revision and once with it;
    # the first call will fail for anything older than 0.98, and the
    # second will fail for 0.98.0.
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90 print """
91For more details, see:
92 http://gem5.org/Dependencies
93"""
94 raise
95
# We ensure the python version early because python-config
# requires python 2.5
98try:
99 EnsurePythonVersion(2, 5)
100except SystemExit, e:
101 print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109 raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import subprocess
116import sys
117
118from os import mkdir, environ
119from os.path import abspath, basename, dirname, expanduser, normpath
120from os.path import exists, isdir, isfile
121from os.path import join as joinpath, split as splitpath
122
123# SCons includes
124import SCons
125import SCons.Node
126
127extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132sys.path[1:1] = extra_python_paths
133
134from m5.util import compareVersions, readCommand
135from m5.util.terminal import get_termcap
136
137help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141}
142
143Export("help_texts")
144
145
# There's a bug in scons: (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h',
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption. See:
# http://scons.tigris.org/issues/show_bug.cgi?id=2356
# http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption and
# re-inject it via Help(). Ideally someday this bug will be fixed and
# we can just use AddOption directly.
157def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176AddLocalOption('--default', dest='default', type='string', action='store',
177 help='Override which build_opts file to use for defaults')
178AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
179 help='Disable style checking hooks')
180AddLocalOption('--no-lto', dest='no_lto', action='store_true',
181 help='Disable Link-Time Optimization for fast')
182AddLocalOption('--update-ref', dest='update_ref', action='store_true',
183 help='Update test reference outputs')
184AddLocalOption('--verbose', dest='verbose', action='store_true',
185 help='Print full tool command lines')
186
187termcap = get_termcap(GetOption('use_colors'))
188
189########################################################################
190#
191# Set up the main build environment.
192#
193########################################################################
194
195# export TERM so that clang reports errors in color
196use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
197 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
198 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
199
200use_prefixes = [
201 "M5", # M5 configuration (e.g., path to kernels)
202 "DISTCC_", # distcc (distributed compiler wrapper) configuration
203 "CCACHE_", # ccache (caching compiler wrapper) configuration
204 "CCC_", # clang static analyzer configuration
205 ]
206
207use_env = {}
208for key,val in os.environ.iteritems():
209 if key in use_vars or \
210 any([key.startswith(prefix) for prefix in use_prefixes]):
211 use_env[key] = val
212
213main = Environment(ENV=use_env)
214main.Decider('MD5-timestamp')
215main.root = Dir(".") # The current directory (where this file lives).
216main.srcdir = Dir("src") # The source directory
217
218main_dict_keys = main.Dictionary().keys()
219
220# Check that we have a C/C++ compiler
221if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
222 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
223 Exit(1)
224
225# Check that swig is present
226if not 'SWIG' in main_dict_keys:
227 print "swig is not installed (package swig on Ubuntu and RedHat)"
228 Exit(1)
229
# add useful python code to PYTHONPATH so it can be used by
# subprocesses as well
232main.AppendENVPath('PYTHONPATH', extra_python_paths)
233
234########################################################################
235#
236# Mercurial Stuff.
237#
238# If the gem5 directory is a mercurial repository, we should do some
239# extra things.
240#
241########################################################################
242
243hgdir = main.root.Dir(".hg")
244
245mercurial_style_message = """
246You're missing the gem5 style hook, which automatically checks your code
247against the gem5 style rules on hg commit and qrefresh commands. This
248script will now install the hook in your .hg/hgrc file.
249Press enter to continue, or ctrl-c to abort: """
250
251mercurial_style_hook = """
252# The following lines were automatically added by gem5/SConstruct
253# to provide the gem5 style-checking hooks
254[extensions]
255style = %s/util/style.py
256
257[hooks]
258pretxncommit.style = python:style.check_style
259pre-qrefresh.style = python:style.check_style
260# End of SConstruct additions
261
262""" % (main.root.abspath)
263
264mercurial_lib_not_found = """
265Mercurial libraries cannot be found, ignoring style hook. If
266you are a gem5 developer, please fix this and run the style
267hook. It is important.
268"""
269
270# Check for style hook and prompt for installation if it's not there.
271# Skip this if --ignore-style was specified, there's no .hg dir to
272# install a hook in, or there's no interactive terminal to prompt.
273if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
274 style_hook = True
275 try:
276 from mercurial import ui
277 ui = ui.ui()
278 ui.readconfig(hgdir.File('hgrc').abspath)
279 style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
280 ui.config('hooks', 'pre-qrefresh.style', None)
281 except ImportError:
282 print mercurial_lib_not_found
283
284 if not style_hook:
285 print mercurial_style_message,
286 # continue unless user does ctrl-c/ctrl-d etc.
287 try:
288 raw_input()
289 except:
290 print "Input exception, exiting scons.\n"
291 sys.exit(1)
292 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
293 print "Adding style hook to", hgrc_path, "\n"
294 try:
295 hgrc = open(hgrc_path, 'a')
296 hgrc.write(mercurial_style_hook)
297 hgrc.close()
298 except:
299 print "Error updating", hgrc_path
300 sys.exit(1)
301
302
303###################################################
304#
305# Figure out which configurations to set up based on the path(s) of
306# the target(s).
307#
308###################################################
309
310# Find default configuration & binary.
311Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
312
313# helper function: find last occurrence of element in list
314def rfind(l, elt, offs = -1):
315 for i in range(len(l)+offs, 0, -1):
316 if l[i] == elt:
317 return i
318 raise ValueError, "element not found"
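
# As an illustration (the path is made up): for the components
# ['', 'home', 'me', 'gem5', 'build', 'ARM', 'gem5.opt'],
# rfind(l, 'build', -2) returns 4, the index of the last non-leaf
# 'build' component; that is how the target-path code below uses it.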
319
320# Take a list of paths (or SCons Nodes) and return a list with all
321# paths made absolute and ~-expanded. Paths will be interpreted
322# relative to the launch directory unless a different root is provided
323def makePathListAbsolute(path_list, root=GetLaunchDir()):
324 return [abspath(joinpath(root, expanduser(str(p))))
325 for p in path_list]
326
327# Each target must have 'build' in the interior of the path; the
328# directory below this will determine the build parameters. For
329# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.
332
333# The funky assignment to "[:]" is needed to replace the list contents
334# in place rather than reassign the symbol to a new list, which
335# doesn't work (obviously!).
336BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
337
338# Generate a list of the unique build roots and configs that the
339# collected targets reference.
340variant_paths = []
341build_root = None
342for t in BUILD_TARGETS:
343 path_dirs = t.split('/')
344 try:
345 build_top = rfind(path_dirs, 'build', -2)
346 except:
347 print "Error: no non-leaf 'build' dir found on target path", t
348 Exit(1)
349 this_build_root = joinpath('/',*path_dirs[:build_top+1])
350 if not build_root:
351 build_root = this_build_root
352 else:
353 if this_build_root != build_root:
354 print "Error: build targets not under same build root\n"\
355 " %s\n %s" % (build_root, this_build_root)
356 Exit(1)
357 variant_path = joinpath('/',*path_dirs[:build_top+2])
358 if variant_path not in variant_paths:
359 variant_paths.append(variant_path)
360
361# Make sure build_root exists (might not if this is the first build there)
362if not isdir(build_root):
363 mkdir(build_root)
364main['BUILDROOT'] = build_root
365
366Export('main')
367
368main.SConsignFile(joinpath(build_root, "sconsign"))
369
370# Default duplicate option is to use hard links, but this messes up
371# when you use emacs to edit a file in the target dir, as emacs moves
372# file to file~ then copies to file, breaking the link. Symbolic
373# (soft) links work better.
374main.SetOption('duplicate', 'soft-copy')
375
376#
377# Set up global sticky variables... these are common to an entire build
378# tree (not specific to a particular build like ALPHA_SE)
379#
380
381global_vars_file = joinpath(build_root, 'variables.global')
382
383global_vars = Variables(global_vars_file, args=ARGUMENTS)
384
385global_vars.AddVariables(
386 ('CC', 'C compiler', environ.get('CC', main['CC'])),
387 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
388 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
389 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
390 ('BATCH', 'Use batch pool for build and tests', False),
391 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
392 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
393 ('EXTRAS', 'Add extra directories to the compilation', '')
394 )
395
396# Update main environment with values from ARGUMENTS & global_vars_file
397global_vars.Update(main)
398help_texts["global_vars"] += global_vars.GenerateHelpText(main)
399
400# Save sticky variable settings back to current variables file
401global_vars.Save(global_vars_file, main)
402
# Parse the EXTRAS variable to build a list of all the directories
# where we look for sources etc. This list is exported as
# extras_dir_list.
405base_dir = main.srcdir.abspath
406if main['EXTRAS']:
407 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
408else:
409 extras_dir_list = []
410
411Export('base_dir')
412Export('extras_dir_list')
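
# As an illustration (the paths are hypothetical), extra source
# directories are passed as a colon-separated list and, being a sticky
# variable, are remembered for later builds:
# % scons EXTRAS=/work/my_extension:/work/other_extension build/ARM/gem5.opt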
413
# the ext directory should be on the #include search path
415main.Append(CPPPATH=[Dir('ext')])
416
417def strip_build_path(path, env):
418 path = str(path)
419 variant_base = env['BUILDROOT'] + os.path.sep
420 if path.startswith(variant_base):
421 path = path[len(variant_base):]
422 elif path.startswith('build/'):
423 path = path[6:]
424 return path
425
426# Generate a string of the form:
427# common/path/prefix/src1, src2 -> tgt1, tgt2
428# to print while building.
429class Transform(object):
430 # all specific color settings should be here and nowhere else
431 tool_color = termcap.Normal
432 pfx_color = termcap.Yellow
433 srcs_color = termcap.Yellow + termcap.Bold
434 arrow_color = termcap.Blue + termcap.Bold
435 tgts_color = termcap.Yellow + termcap.Bold
436
437 def __init__(self, tool, max_sources=99):
438 self.format = self.tool_color + (" [%8s] " % tool) \
439 + self.pfx_color + "%s" \
440 + self.srcs_color + "%s" \
441 + self.arrow_color + " -> " \
442 + self.tgts_color + "%s" \
443 + termcap.Normal
444 self.max_sources = max_sources
445
446 def __call__(self, target, source, env, for_signature=None):
447 # truncate source list according to max_sources param
448 source = source[0:self.max_sources]
449 def strip(f):
450 return strip_build_path(str(f), env)
451 if len(source) > 0:
452 srcs = map(strip, source)
453 else:
454 srcs = ['']
455 tgts = map(strip, target)
456 # surprisingly, os.path.commonprefix is a dumb char-by-char string
457 # operation that has nothing to do with paths.
458 com_pfx = os.path.commonprefix(srcs + tgts)
459 com_pfx_len = len(com_pfx)
460 if com_pfx:
461 # do some cleanup and sanity checking on common prefix
462 if com_pfx[-1] == ".":
463 # prefix matches all but file extension: ok
464 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
465 com_pfx = com_pfx[0:-1]
466 elif com_pfx[-1] == "/":
467 # common prefix is directory path: OK
468 pass
469 else:
470 src0_len = len(srcs[0])
471 tgt0_len = len(tgts[0])
472 if src0_len == com_pfx_len:
473 # source is a substring of target, OK
474 pass
475 elif tgt0_len == com_pfx_len:
476 # target is a substring of source, need to back up to
477 # avoid empty string on RHS of arrow
478 sep_idx = com_pfx.rfind(".")
479 if sep_idx != -1:
480 com_pfx = com_pfx[0:sep_idx]
481 else:
482 com_pfx = ''
483 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
484 # still splitting at file extension: ok
485 pass
486 else:
487 # probably a fluke; ignore it
488 com_pfx = ''
489 # recalculate length in case com_pfx was modified
490 com_pfx_len = len(com_pfx)
491 def fmt(files):
492 f = map(lambda s: s[com_pfx_len:], files)
493 return ', '.join(f)
494 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
495
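# As a rough illustration of the abbreviated output (file names are
# made up): compiling base/statistics.cc to base/statistics.o would be
# reported as
#  [     CXX] base/statistics.cc -> .o
# since the common prefix is printed once and only the differing
# suffixes appear on either side of the arrow.
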
496Export('Transform')
497
498# enable the regression script to use the termcap
499main['TERMCAP'] = termcap
500
501if GetOption('verbose'):
502 def MakeAction(action, string, *args, **kwargs):
503 return Action(action, *args, **kwargs)
504else:
505 MakeAction = Action
506 main['CCCOMSTR'] = Transform("CC")
507 main['CXXCOMSTR'] = Transform("CXX")
508 main['ASCOMSTR'] = Transform("AS")
509 main['SWIGCOMSTR'] = Transform("SWIG")
510 main['ARCOMSTR'] = Transform("AR", 0)
511 main['LINKCOMSTR'] = Transform("LINK", 0)
512 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
513 main['M4COMSTR'] = Transform("M4")
514 main['SHCCCOMSTR'] = Transform("SHCC")
515 main['SHCXXCOMSTR'] = Transform("SHCXX")
516Export('MakeAction')
517
518# Initialize the Link-Time Optimization (LTO) flags
519main['LTO_CCFLAGS'] = []
520main['LTO_LDFLAGS'] = []
521
522# According to the readme, tcmalloc works best if the compiler doesn't
523# assume that we're using the builtin malloc and friends. These flags
524# are compiler-specific, so we need to set them after we detect which
525# compiler we're using.
526main['TCMALLOC_CCFLAGS'] = []
527
528CXX_version = readCommand([main['CXX'],'--version'], exception=False)
529CXX_V = readCommand([main['CXX'],'-V'], exception=False)
530
531main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
532main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
533if main['GCC'] + main['CLANG'] > 1:
534 print 'Error: How can we have two at the same time?'
535 Exit(1)
536
537# Set up default C++ compiler flags
538if main['GCC'] or main['CLANG']:
539 # As gcc and clang share many flags, do the common parts here
540 main.Append(CCFLAGS=['-pipe'])
541 main.Append(CCFLAGS=['-fno-strict-aliasing'])
542 # Enable -Wall and then disable the few warnings that we
543 # consistently violate
544 main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and
    # clang >= 3.1 actually accept that name, so we stick with c++0x
547 main.Append(CXXFLAGS=['-std=c++0x'])
548 # Add selected sanity checks from -Wextra
549 main.Append(CXXFLAGS=['-Wmissing-field-initializers',
550 '-Woverloaded-virtual'])
551else:
552 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
553 print "Don't know what compiler options to use for your compiler."
554 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
555 print termcap.Yellow + ' version:' + termcap.Normal,
556 if not CXX_version:
557 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
558 termcap.Normal
559 else:
560 print CXX_version.replace('\n', '<nl>')
561 print " If you're trying to use a compiler other than GCC"
562 print " or clang, there appears to be something wrong with your"
563 print " environment."
564 print " "
565 print " If you are trying to use a compiler other than those listed"
    print " above you will need to fix SConstruct and"
567 print " src/SConscript to support that compiler."
568 Exit(1)
569
570if main['GCC']:
571 # Check for a supported version of gcc. >= 4.6 is chosen for its
572 # level of c++11 support. See
573 # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
574 # the first version with proper LTO support.
575 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
576 if compareVersions(gcc_version, "4.6") < 0:
577 print 'Error: gcc version 4.6 or newer required.'
578 print ' Installed version:', gcc_version
579 Exit(1)
580
581 main['GCC_VERSION'] = gcc_version
582
583 # gcc from version 4.8 and above generates "rep; ret" instructions
584 # to avoid performance penalties on certain AMD chips. Older
585 # assemblers detect this as an error, "Error: expecting string
586 # instruction after `rep'"
587 if compareVersions(gcc_version, "4.8") > 0:
588 as_version = readCommand([main['AS'], '-v', '/dev/null'],
589 exception=False).split()
590
591 if not as_version or compareVersions(as_version[-1], "2.23") < 0:
592 print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
594 ' known incompatibilities.\n' + \
595 ' If you encounter build problems, please update ' + \
596 'binutils to 2.23.' + \
597 termcap.Normal
598
599 # Add the appropriate Link-Time Optimization (LTO) flags
600 # unless LTO is explicitly turned off. Note that these flags
601 # are only used by the fast target.
602 if not GetOption('no_lto'):
603 # Pass the LTO flag when compiling to produce GIMPLE
604 # output, we merely create the flags here and only append
605 # them later
606 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
607
608 # Use the same amount of jobs for LTO as we are running
609 # scons with, we hardcode the use of the linker plugin
610 # which requires either gold or GNU ld >= 2.21
611 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
612 '-fuse-linker-plugin']
611
612 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
613 '-fno-builtin-realloc', '-fno-builtin-free'])
614
615elif main['CLANG']:
616 # Check for a supported version of clang, >= 3.0 is needed to
617 # support similar features as gcc 4.6. See
618 # http://clang.llvm.org/cxx_status.html for details
619 clang_version_re = re.compile(".* version (\d+\.\d+)")
620 clang_version_match = clang_version_re.search(CXX_version)
621 if (clang_version_match):
622 clang_version = clang_version_match.groups()[0]
623 if compareVersions(clang_version, "3.0") < 0:
624 print 'Error: clang version 3.0 or newer required.'
625 print ' Installed version:', clang_version
626 Exit(1)
627 else:
628 print 'Error: Unable to determine clang version.'
629 Exit(1)
630
    # clang has a few additional warnings that we disable:
    # tautological comparisons are allowed because unsigned integers
    # get compared to constants that happen to be 0, extraneous
    # parentheses are allowed because of Ruby's printing of the AST,
    # and self assignments are allowed because the generated CPU code
    # relies on them
637 main.Append(CCFLAGS=['-Wno-tautological-compare',
638 '-Wno-parentheses',
639 '-Wno-self-assign',
640 # Some versions of libstdc++ (4.8?) seem to
641 # use struct hash and class hash
642 # interchangeably.
643 '-Wno-mismatched-tags',
644 ])
645
646 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
647
    # On Mac OS X/Darwin we need to also use libc++ (part of Xcode) as
    # opposed to libstdc++, as the latter is dated.
650 if sys.platform == "darwin":
651 main.Append(CXXFLAGS=['-stdlib=libc++'])
652 main.Append(LIBS=['c++'])
653
654else:
655 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
656 print "Don't know what compiler options to use for your compiler."
657 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
658 print termcap.Yellow + ' version:' + termcap.Normal,
659 if not CXX_version:
660 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
661 termcap.Normal
662 else:
663 print CXX_version.replace('\n', '<nl>')
664 print " If you're trying to use a compiler other than GCC"
665 print " or clang, there appears to be something wrong with your"
666 print " environment."
667 print " "
668 print " If you are trying to use a compiler other than those listed"
    print " above you will need to fix SConstruct and"
670 print " src/SConscript to support that compiler."
671 Exit(1)
672
673# Set up common yacc/bison flags (needed for Ruby)
674main['YACCFLAGS'] = '-d'
675main['YACCHXXFILESUFFIX'] = '.hh'
676
# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
679if main['BATCH']:
680 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
681 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
682 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
683 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
684 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
685
686if sys.platform == 'cygwin':
687 # cygwin has some header file issues...
688 main.Append(CCFLAGS=["-Wno-uninitialized"])
689
690# Check for the protobuf compiler
691protoc_version = readCommand([main['PROTOC'], '--version'],
692 exception='').split()
693
694# First two words should be "libprotoc x.y.z"
695if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
696 print termcap.Yellow + termcap.Bold + \
697 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
698 ' Please install protobuf-compiler for tracing support.' + \
699 termcap.Normal
700 main['PROTOC'] = False
701else:
702 # Based on the availability of the compress stream wrappers,
703 # require 2.1.0
704 min_protoc_version = '2.1.0'
705 if compareVersions(protoc_version[1], min_protoc_version) < 0:
706 print termcap.Yellow + termcap.Bold + \
707 'Warning: protoc version', min_protoc_version, \
708 'or newer required.\n' + \
709 ' Installed version:', protoc_version[1], \
710 termcap.Normal
711 main['PROTOC'] = False
712 else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config; that means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # go through a library config check, and at that point the test
        # will fail if libprotobuf cannot be found.
719 if readCommand(['pkg-config', '--version'], exception=''):
720 try:
721 # Attempt to establish what linking flags to add for protobuf
722 # using pkg-config
723 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
724 except:
725 print termcap.Yellow + termcap.Bold + \
726 'Warning: pkg-config could not get protobuf flags.' + \
727 termcap.Normal
728
729# Check for SWIG
730if not main.has_key('SWIG'):
731 print 'Error: SWIG utility not found.'
732 print ' Please install (see http://www.swig.org) and retry.'
733 Exit(1)
734
735# Check for appropriate SWIG version
736swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
737# First 3 words should be "SWIG Version x.y.z"
738if len(swig_version) < 3 or \
739 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
740 print 'Error determining SWIG version.'
741 Exit(1)
742
743min_swig_version = '2.0.4'
744if compareVersions(swig_version[2], min_swig_version) < 0:
745 print 'Error: SWIG version', min_swig_version, 'or newer required.'
746 print ' Installed version:', swig_version[2]
747 Exit(1)
748
749# Set up SWIG flags & scanner
750swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
751main.Append(SWIGFLAGS=swig_flags)
752
753# Check for 'timeout' from GNU coreutils. If present, regressions
754# will be run with a time limit.
755TIMEOUT_version = readCommand(['timeout', '--version'], exception=False)
756main['TIMEOUT'] = TIMEOUT_version and TIMEOUT_version.find('timeout') == 0
757
758# filter out all existing swig scanners, they mess up the dependency
759# stuff for some reason
760scanners = []
761for scanner in main['SCANNERS']:
762 skeys = scanner.skeys
763 if skeys == '.i':
764 continue
765
766 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
767 continue
768
769 scanners.append(scanner)
770
771# add the new swig scanner that we like better
772from SCons.Scanner import ClassicCPP as CPPScanner
773swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
774scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
775
# replace the scanners list with one that has what we want
777main['SCANNERS'] = scanners
778
779# Add a custom Check function to the Configure context so that we can
780# figure out if the compiler adds leading underscores to global
781# variables. This is needed for the autogenerated asm files that we
782# use for embedding the python code.
783def CheckLeading(context):
784 context.Message("Checking for leading underscore in global variables...")
785 # 1) Define a global variable called x from asm so the C compiler
786 # won't change the symbol at all.
787 # 2) Declare that variable.
788 # 3) Use the variable
789 #
790 # If the compiler prepends an underscore, this will successfully
791 # link because the external symbol 'x' will be called '_x' which
792 # was defined by the asm statement. If the compiler does not
793 # prepend an underscore, this will not successfully link because
794 # '_x' will have been defined by assembly, while the C portion of
795 # the code will be trying to use 'x'
796 ret = context.TryLink('''
797 asm(".globl _x; _x: .byte 0");
798 extern int x;
799 int main() { return x; }
800 ''', extension=".c")
801 context.env.Append(LEADING_UNDERSCORE=ret)
802 context.Result(ret)
803 return ret
804
805# Add a custom Check function to test for structure members.
806def CheckMember(context, include, decl, member, include_quotes="<>"):
807 context.Message("Checking for member %s in %s..." %
808 (member, decl))
809 text = """
810#include %(header)s
811int main(){
812 %(decl)s test;
813 (void)test.%(member)s;
814 return 0;
815};
816""" % { "header" : include_quotes[0] + include + include_quotes[1],
817 "decl" : decl,
818 "member" : member,
819 }
820
821 ret = context.TryCompile(text, extension=".cc")
822 context.Result(ret)
823 return ret
824
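# As a usage sketch: the perf_event check further down amounts to
# conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                  'exclude_host')
# which simply tries to compile a tiny program that touches that member.
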
825# Platform-specific configuration. Note again that we assume that all
826# builds under a given build root run on the same host platform.
827conf = Configure(main,
828 conf_dir = joinpath(build_root, '.scons_config'),
829 log_file = joinpath(build_root, 'scons_config.log'),
830 custom_tests = {
831 'CheckLeading' : CheckLeading,
832 'CheckMember' : CheckMember,
833 })
834
835# Check for leading underscores. Don't really need to worry either
836# way so don't need to check the return code.
837conf.CheckLeading()
838
839# Check if we should compile a 64 bit binary on Mac OS X/Darwin
840try:
841 import platform
842 uname = platform.uname()
843 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
844 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
845 main.Append(CCFLAGS=['-arch', 'x86_64'])
846 main.Append(CFLAGS=['-arch', 'x86_64'])
847 main.Append(LINKFLAGS=['-arch', 'x86_64'])
848 main.Append(ASFLAGS=['-arch', 'x86_64'])
849except:
850 pass
851
852# Recent versions of scons substitute a "Null" object for Configure()
853# when configuration isn't necessary, e.g., if the "--help" option is
# present. Unfortunately this Null object always returns false,
855# breaking all our configuration checks. We replace it with our own
856# more optimistic null object that returns True instead.
857if not conf:
858 def NullCheck(*args, **kwargs):
859 return True
860
861 class NullConf:
862 def __init__(self, env):
863 self.env = env
864 def Finish(self):
865 return self.env
866 def __getattr__(self, mname):
867 return NullCheck
868
869 conf = NullConf(main)
870
871# Cache build files in the supplied directory.
872if main['M5_BUILD_CACHE']:
873 print 'Using build cache located at', main['M5_BUILD_CACHE']
874 CacheDir(main['M5_BUILD_CACHE'])
875
876# Find Python include and library directories for embedding the
877# interpreter. We rely on python-config to resolve the appropriate
878# includes and linker flags. ParseConfig does not seem to understand
879# the more exotic linker flags such as -Xlinker and -export-dynamic so
880# we add them explicitly below. If you want to link in an alternate
881# version of python, see above for instructions on how to invoke
882# scons with the appropriate PATH set.
883#
884# First we check if python2-config exists, else we use python-config
885python_config = readCommand(['which', 'python2-config'], exception='').strip()
886if not os.path.exists(python_config):
887 python_config = readCommand(['which', 'python-config'],
888 exception='').strip()
889py_includes = readCommand([python_config, '--includes'],
890 exception='').split()
891# Strip the -I from the include folders before adding them to the
892# CPPPATH
893main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
894
895# Read the linker flags and split them into libraries and other link
# flags. The libraries are added later through the call to CheckLib.
897py_ld_flags = readCommand([python_config, '--ldflags'], exception='').split()
898py_libs = []
899for lib in py_ld_flags:
900 if not lib.startswith('-l'):
901 main.Append(LINKFLAGS=[lib])
902 else:
903 lib = lib[2:]
904 if lib not in py_libs:
905 py_libs.append(lib)
906
907# verify that this stuff works
908if not conf.CheckHeader('Python.h', '<>'):
909 print "Error: can't find Python.h header in", py_includes
910 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
911 Exit(1)
912
913for lib in py_libs:
914 if not conf.CheckLib(lib):
915 print "Error: can't find library %s required by python" % lib
916 Exit(1)
917
918# On Solaris you need to use libsocket for socket ops
919if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
920 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
921 print "Can't find library with socket calls (e.g. accept())"
922 Exit(1)
923
924# Check for zlib. If the check passes, libz will be automatically
925# added to the LIBS environment variable.
926if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
927 print 'Error: did not find needed zlib compression library '\
928 'and/or zlib.h header file.'
929 print ' Please install zlib and try again.'
930 Exit(1)
931
932# If we have the protobuf compiler, also make sure we have the
933# development libraries. If the check passes, libprotobuf will be
934# automatically added to the LIBS environment variable. After
935# this, we can use the HAVE_PROTOBUF flag to determine if we have
936# got both protoc and libprotobuf available.
937main['HAVE_PROTOBUF'] = main['PROTOC'] and \
938 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
939 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
940
941# If we have the compiler but not the library, print another warning.
942if main['PROTOC'] and not main['HAVE_PROTOBUF']:
943 print termcap.Yellow + termcap.Bold + \
944 'Warning: did not find protocol buffer library and/or headers.\n' + \
945 ' Please install libprotobuf-dev for tracing support.' + \
946 termcap.Normal
947
948# Check for librt.
949have_posix_clock = \
950 conf.CheckLibWithHeader(None, 'time.h', 'C',
951 'clock_nanosleep(0,0,NULL,NULL);') or \
952 conf.CheckLibWithHeader('rt', 'time.h', 'C',
953 'clock_nanosleep(0,0,NULL,NULL);')
954
955have_posix_timers = \
956 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
957 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
958
959if conf.CheckLib('tcmalloc'):
960 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
961elif conf.CheckLib('tcmalloc_minimal'):
962 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
963else:
964 print termcap.Yellow + termcap.Bold + \
965 "You can get a 12% performance improvement by installing tcmalloc "\
966 "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
967 termcap.Normal
968
969if not have_posix_clock:
970 print "Can't find library for POSIX clocks."
971
972# Check for <fenv.h> (C99 FP environment control)
973have_fenv = conf.CheckHeader('fenv.h', '<>')
974if not have_fenv:
975 print "Warning: Header file <fenv.h> not found."
976 print " This host has no IEEE FP rounding mode control."
977
978# Check if we should enable KVM-based hardware virtualization. The API
# we rely on has existed since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fallback.
982have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
983 conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
984if not have_kvm:
985 print "Info: Compatible header file <linux/kvm.h> not found, " \
986 "disabling KVM support."
987
988# Check if the requested target ISA is compatible with the host
989def is_isa_kvm_compatible(isa):
990 isa_comp_table = {
        "arm" : ( "armv7l", ),
        "x86" : ( "x86_64", ),
993 }
994 try:
995 import platform
996 host_isa = platform.machine()
997 except:
998 print "Warning: Failed to determine host ISA."
999 return False
1000
1001 return host_isa in isa_comp_table.get(isa, [])
1002
1003
1004# Check if the exclude_host attribute is available. We want this to
1005# get accurate instruction counts in KVM.
1006main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1007 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1008
1009
1010######################################################################
1011#
1012# Finish the configuration
1013#
1014main = conf.Finish()
1015
1016######################################################################
1017#
1018# Collect all non-global variables
1019#
1020
1021# Define the universe of supported ISAs
1022all_isa_list = [ ]
1023Export('all_isa_list')
1024
1025class CpuModel(object):
1026 '''The CpuModel class encapsulates everything the ISA parser needs to
1027 know about a particular CPU model.'''
1028
1029 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1030 dict = {}
1031
1032 # Constructor. Automatically adds models to CpuModel.dict.
1033 def __init__(self, name, default=False):
1034 self.name = name # name of model
1035
1036 # This cpu is enabled by default
1037 self.default = default
1038
1039 # Add self to dict
1040 if name in CpuModel.dict:
1041 raise AttributeError, "CpuModel '%s' already registered" % name
1042 CpuModel.dict[name] = self
1043
1044Export('CpuModel')
1045
1046# Sticky variables get saved in the variables file so they persist from
1047# one invocation to the next (unless overridden, in which case the new
1048# value becomes sticky).
1049sticky_vars = Variables(args=ARGUMENTS)
1050Export('sticky_vars')
1051
1052# Sticky variables that should be exported
1053export_vars = []
1054Export('export_vars')
1055
1056# For Ruby
1057all_protocols = []
1058Export('all_protocols')
1059protocol_dirs = []
1060Export('protocol_dirs')
1061slicc_includes = []
1062Export('slicc_includes')
1063
# Walk the tree and execute all SConsopts scripts that will add to the
1065# above variables
1066if GetOption('verbose'):
1067 print "Reading SConsopts"
1068for bdir in [ base_dir ] + extras_dir_list:
1069 if not isdir(bdir):
1070 print "Error: directory '%s' does not exist" % bdir
1071 Exit(1)
1072 for root, dirs, files in os.walk(bdir):
1073 if 'SConsopts' in files:
1074 if GetOption('verbose'):
1075 print "Reading", joinpath(root, 'SConsopts')
1076 SConscript(joinpath(root, 'SConsopts'))
1077
1078all_isa_list.sort()
1079
1080sticky_vars.AddVariables(
1081 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1082 ListVariable('CPU_MODELS', 'CPU models',
1083 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1084 sorted(CpuModel.dict.keys())),
1085 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1086 False),
1087 BoolVariable('SS_COMPATIBLE_FP',
1088 'Make floating-point results compatible with SimpleScalar',
1089 False),
1090 BoolVariable('USE_SSE2',
1091 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1092 False),
1093 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1094 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1095 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1096 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1097 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1098 all_protocols),
1099 )
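
# Sticky variables can be set on the scons command line and are then
# remembered for later builds in the same directory, e.g. (illustrative
# values; the available CPU models come from the SConsopts walk above):
# % scons build/ARM/gem5.opt CPU_MODELS=AtomicSimpleCPU,O3CPU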
1100
1101# These variables get exported to #defines in config/*.hh (see src/SConscript).
1102export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
1103 'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF',
1104 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1105
1106###################################################
1107#
1108# Define a SCons builder for configuration flag headers.
1109#
1110###################################################
1111
# This function generates a config header file that #defines the
# variable symbol to the current variable setting (booleans become
# 0 or 1, strings are quoted). The source
1114# operands are the name of the variable and a Value node containing the
1115# value of the variable.
1116def build_config_file(target, source, env):
1117 (variable, value) = [s.get_contents() for s in source]
1118 f = file(str(target[0]), 'w')
1119 print >> f, '#define', variable, value
1120 f.close()
1121 return None
1122
# Combine the build function and its pretty-printed Transform string
# into a scons Action object.
1124config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1125
1126# The emitter munges the source & target node lists to reflect what
1127# we're really doing.
1128def config_emitter(target, source, env):
1129 # extract variable name from Builder arg
1130 variable = str(target[0])
1131 # True target is config header file
1132 target = joinpath('config', variable.lower() + '.hh')
1133 val = env[variable]
1134 if isinstance(val, bool):
1135 # Force value to 0/1
1136 val = int(val)
1137 elif isinstance(val, str):
1138 val = '"' + val + '"'
1139
1140 # Sources are variable name & value (packaged in SCons Value nodes)
1141 return ([target], [Value(variable), Value(val)])
1142
1143config_builder = Builder(emitter = config_emitter, action = config_action)
1144
1145main.Append(BUILDERS = { 'ConfigFile' : config_builder })
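
# As a small illustration: with USE_FENV set to True in a build, invoking
# something like env.ConfigFile('USE_FENV') (as src/SConscript does for
# each exported variable) produces config/use_fenv.hh containing the
# single line
#   #define USE_FENV 1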
1146
1147# libelf build is shared across all configs in the build root.
1148main.SConscript('ext/libelf/SConscript',
1149 variant_dir = joinpath(build_root, 'libelf'))
1150
1151# gzstream build is shared across all configs in the build root.
1152main.SConscript('ext/gzstream/SConscript',
1153 variant_dir = joinpath(build_root, 'gzstream'))
1154
1155# libfdt build is shared across all configs in the build root.
1156main.SConscript('ext/libfdt/SConscript',
1157 variant_dir = joinpath(build_root, 'libfdt'))
1158
1159# fputils build is shared across all configs in the build root.
1160main.SConscript('ext/fputils/SConscript',
1161 variant_dir = joinpath(build_root, 'fputils'))
1162
1163# DRAMSim2 build is shared across all configs in the build root.
1164main.SConscript('ext/dramsim2/SConscript',
1165 variant_dir = joinpath(build_root, 'dramsim2'))
1166
1167###################################################
1168#
1169# This function is used to set up a directory with switching headers
1170#
1171###################################################
1172
1173main['ALL_ISA_LIST'] = all_isa_list
1174all_isa_deps = {}
1175def make_switching_dir(dname, switch_headers, env):
1176 # Generate the header. target[0] is the full path of the output
1177 # header to generate. 'source' is a dummy variable, since we get the
1178 # list of ISAs from env['ALL_ISA_LIST'].
1179 def gen_switch_hdr(target, source, env):
1180 fname = str(target[0])
1181 isa = env['TARGET_ISA'].lower()
1182 try:
1183 f = open(fname, 'w')
1184 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1185 f.close()
1186 except IOError:
1187 print "Failed to create %s" % fname
1188 raise
1189
1190 # Build SCons Action object. 'varlist' specifies env vars that this
1191 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1192 # should get re-executed.
1193 switch_hdr_action = MakeAction(gen_switch_hdr,
1194 Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1195
1196 # Instantiate actions for each header
1197 for hdr in switch_headers:
1198 env.Command(hdr, [], switch_hdr_action)
1199
1200 isa_target = Dir('.').up().name.lower().replace('_', '-')
1201 env['PHONY_BASE'] = '#'+isa_target
1202 all_isa_deps[isa_target] = None
1203
1204Export('make_switching_dir')
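
# As an illustration: in a build configured with TARGET_ISA=arm, a
# switching header such as arch/isa_traits.hh (the name is just an
# example) ends up containing only
#   #include "arch/arm/isa_traits.hh"
# so the per-ISA implementation is picked up at build time.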
1205
# all-isas -> all-deps -> all-environs -> all-targets
1207main.Alias('#all-isas', [])
1208main.Alias('#all-deps', '#all-isas')
1209
1210# Dummy target to ensure all environments are created before telling
1211# SCons what to actually make (the command line arguments). We attach
1212# them to the dependence graph after the environments are complete.
1213ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1214def environsComplete(target, source, env):
1215 for t in ORIG_BUILD_TARGETS:
1216 main.Depends('#all-targets', t)
1217
1218# Each build/* switching_dir attaches its *-environs target to #all-environs.
1219main.Append(BUILDERS = {'CompleteEnvirons' :
1220 Builder(action=MakeAction(environsComplete, None))})
1221main.CompleteEnvirons('#all-environs', [])
1222
1223def doNothing(**ignored): pass
1224main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1225
1226# The final target to which all the original targets ultimately get attached.
1227main.Dummy('#all-targets', '#all-environs')
1228BUILD_TARGETS[:] = ['#all-targets']
1229
1230###################################################
1231#
1232# Define build environments for selected configurations.
1233#
1234###################################################
1235
1236for variant_path in variant_paths:
1237 if not GetOption('silent'):
1238 print "Building in", variant_path
1239
1240 # Make a copy of the build-root environment to use for this config.
1241 env = main.Clone()
1242 env['BUILDDIR'] = variant_path
1243
1244 # variant_dir is the tail component of build path, and is used to
1245 # determine the build parameters (e.g., 'ALPHA_SE')
1246 (build_root, variant_dir) = splitpath(variant_path)
1247
1248 # Set env variables according to the build directory config.
1249 sticky_vars.files = []
1250 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1251 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1252 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1253 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1254 if isfile(current_vars_file):
1255 sticky_vars.files.append(current_vars_file)
1256 if not GetOption('silent'):
1257 print "Using saved variables file %s" % current_vars_file
1258 else:
1259 # Build dir-specific variables file doesn't exist.
1260
1261 # Make sure the directory is there so we can create it later
1262 opt_dir = dirname(current_vars_file)
1263 if not isdir(opt_dir):
1264 mkdir(opt_dir)
1265
1266 # Get default build variables from source tree. Variables are
1267 # normally determined by name of $VARIANT_DIR, but can be
1268 # overridden by '--default=' arg on command line.
1269 default = GetOption('default')
1270 opts_dir = joinpath(main.root.abspath, 'build_opts')
1271 if default:
1272 default_vars_files = [joinpath(build_root, 'variables', default),
1273 joinpath(opts_dir, default)]
1274 else:
1275 default_vars_files = [joinpath(opts_dir, variant_dir)]
1276 existing_files = filter(isfile, default_vars_files)
1277 if existing_files:
1278 default_vars_file = existing_files[0]
1279 sticky_vars.files.append(default_vars_file)
1280 print "Variables file %s not found,\n using defaults in %s" \
1281 % (current_vars_file, default_vars_file)
1282 else:
1283 print "Error: cannot find variables file %s or " \
1284 "default file(s) %s" \
1285 % (current_vars_file, ' or '.join(default_vars_files))
1286 Exit(1)
1287
1288 # Apply current variable settings to env
1289 sticky_vars.Update(env)
1290
1291 help_texts["local_vars"] += \
1292 "Build variables for %s:\n" % variant_dir \
1293 + sticky_vars.GenerateHelpText(env)
1294
1295 # Process variable settings.
1296
1297 if not have_fenv and env['USE_FENV']:
1298 print "Warning: <fenv.h> not available; " \
1299 "forcing USE_FENV to False in", variant_dir + "."
1300 env['USE_FENV'] = False
1301
1302 if not env['USE_FENV']:
1303 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1304 print " FP results may deviate slightly from other platforms."
1305
1306 if env['EFENCE']:
1307 env.Append(LIBS=['efence'])
1308
1309 if env['USE_KVM']:
1310 if not have_kvm:
1311 print "Warning: Can not enable KVM, host seems to lack KVM support"
1312 env['USE_KVM'] = False
1313 elif not have_posix_timers:
1314 print "Warning: Can not enable KVM, host seems to lack support " \
1315 "for POSIX timers"
1316 env['USE_KVM'] = False
1317 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1318 print "Info: KVM support disabled due to unsupported host and " \
1319 "target ISA combination"
1320 env['USE_KVM'] = False
1321
1322 # Warn about missing optional functionality
1323 if env['USE_KVM']:
1324 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1325 print "Warning: perf_event headers lack support for the " \
1326 "exclude_host attribute. KVM instruction counts will " \
1327 "be inaccurate."
1328
1329 # Save sticky variable settings back to current variables file
1330 sticky_vars.Save(current_vars_file, env)
1331
1332 if env['USE_SSE2']:
1333 env.Append(CCFLAGS=['-msse2'])
1334
1335 # The src/SConscript file sets up the build rules in 'env' according
1336 # to the configured variables. It returns a list of environments,
1337 # one for each variant build (debug, opt, etc.)
1338 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1339
1340def pairwise(iterable):
1341 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1342 a, b = itertools.tee(iterable)
1343 b.next()
1344 return itertools.izip(a, b)
1345
1346# Create false dependencies so SCons will parse ISAs, establish
1347# dependencies, and setup the build Environments serially. Either
1348# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1349# greater than 1. It appears to be standard race condition stuff; it
1350# doesn't always fail, but usually, and the behaviors are different.
1351# Every time I tried to remove this, builds would fail in some
1352# creative new way. So, don't do that. You'll want to, though, because
1353# tests/SConscript takes a long time to make its Environments.
1354for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1355 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1356 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1357
1358# base help text
1359Help('''
1360Usage: scons [scons options] [build variables] [target(s)]
1361
1362Extra scons options:
1363%(options)s
1364
1365Global build variables:
1366%(global_vars)s
1367
1368%(local_vars)s
1369''' % help_texts)