# -*- mode:python -*-

# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path. The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
# The following two commands are equivalent. The '-u' option tells
# scons to search up the directory tree for this SConstruct file.
# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
# The following two commands are equivalent and demonstrate building
# in a directory outside of the source tree. The '-C' option tells
# scons to chdir to the specified directory to find this SConstruct
# file.
# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options. If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions. See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help(). Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = " " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# Pass through only a whitelist of environment variables; TERM is
# exported so that clang reports errors in color.
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "M5",           # M5 configuration (e.g., path to kernels)
    "DISTCC_",      # distcc (distributed compiler wrapper) configuration
    "CCACHE_",      # ccache (caching compiler wrapper) configuration
    "CCC_",         # clang static analyzer configuration
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies to avoid issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
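# With the 'MD5-timestamp' decider, content checksums are only recomputed
# for files whose timestamps have changed, which speeds up dependency
# checking on a large tree.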
main.Decider('MD5-timestamp')
main.root = Dir(".")      # The current directory (where this file lives).
main.srcdir = Dir("src")  # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands. This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook. If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
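# e.g., rfind(['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug'], 'build', -2)
# returns 3; with offs=-2 the final (leaf) path component is never a match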
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded. Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters. For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  " %s\n %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link. Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#
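# Sticky variables are given on the scons command line (e.g.
# 'scons CC=clang CXX=clang++ EXTRAS=/path/to/extras <target>'); the values
# are remembered in <build_root>/variables.global and reused on later builds.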

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we're
# going to look for sources etc. This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

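# Strip the build root (or a leading 'build/') from a path so that build
# messages stay short, e.g. 'build/ALPHA/base/misc.cc' -> 'ALPHA/base/misc.cc'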
def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
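# For example, compiling base/misc.cc under build/ALPHA prints something like
#   [     CXX] ALPHA/base/misc.cc -> .o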
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

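# MakeAction wraps SCons' Action: in verbose mode the pretty-printing string
# is dropped so the full tool command line is echoed; otherwise the string
# (typically a Transform instance) is used as the action's abbreviated
# output. The *COMSTR settings below do the same for the built-in builders.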
if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR'] = Transform("CC")
    main['CXXCOMSTR'] = Transform("CXX")
    main['ASCOMSTR'] = Transform("AS")
    main['SWIGCOMSTR'] = Transform("SWIG")
    main['ARCOMSTR'] = Transform("AR", 0)
    main['LINKCOMSTR'] = Transform("LINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR'] = Transform("M4")
    main['SHCCCOMSTR'] = Transform("SHCC")
    main['SHCXXCOMSTR'] = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
    # actually use that name, so we stick with c++0x
    main.Append(CXXFLAGS=['-std=c++0x'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + ' version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print " If you're trying to use a compiler other than GCC"
    print " or clang, there appears to be something wrong with your"
    print " environment."
    print " "
    print " If you are trying to use a compiler other than those listed"
    print " above you will need to fix SConstruct and"
    print " src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.6 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
    # the first version with proper LTO support.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.6") < 0:
        print 'Error: gcc version 4.6 or newer required.'
        print ' Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    if compareVersions(gcc_version, "4.8") > 0:
        as_version = readCommand([main['AS'], '-v', '/dev/null'],
                                 exception=False).split()

        if not as_version or compareVersions(as_version[-1], "2.23") < 0:
            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
                ' If you encounter build problems, please update ' + \
                'binutils to 2.23.' + \
                termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.0 is needed to
    # support similar features as gcc 4.6. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.0") < 0:
            print 'Error: clang version 3.0 or newer required.'
            print ' Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable:
    # tautological comparisons are allowed due to unsigned integers
    # being compared to constants that happen to be 0, extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self assignments are allowed as the generated CPU code relies
    # on this
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + ' version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print " If you're trying to use a compiler other than GCC"
    print " or clang, there appears to be something wrong with your"
    print " environment."
    print " "
    print " If you are trying to use a compiler other than those listed"
    print " above you will need to fix SConstruct and"
    print " src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        ' Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            ' Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library configuration check and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print ' Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print ' Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        ' If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
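# ($_CPPINCFLAGS expands the CPPPATH entries into -I options, so SWIG sees
# the same include directories as the C++ compiler.)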
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

1# -*- mode:python -*-
2
3# Copyright (c) 2013 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision, the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90 print """
91For more details, see:
92 http://gem5.org/Dependencies
93"""
94 raise
95
96# We ensure the python version early because because python-config
97# requires python 2.5
98try:
99 EnsurePythonVersion(2, 5)
100except SystemExit, e:
101 print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109 raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import subprocess
116import sys
117
118from os import mkdir, environ
119from os.path import abspath, basename, dirname, expanduser, normpath
120from os.path import exists, isdir, isfile
121from os.path import join as joinpath, split as splitpath
122
123# SCons includes
124import SCons
125import SCons.Node
126
127extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132sys.path[1:1] = extra_python_paths
133
134from m5.util import compareVersions, readCommand
135from m5.util.terminal import get_termcap
136
137help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141}
142
143Export("help_texts")
144
145
146# There's a bug in scons in that (1) by default, the help texts from
147# AddOption() are supposed to be displayed when you type 'scons -h'
148# and (2) you can override the help displayed by 'scons -h' using the
149# Help() function, but these two features are incompatible: once
150# you've overridden the help text using Help(), there's no way to get
151# at the help texts from AddOptions. See:
152# http://scons.tigris.org/issues/show_bug.cgi?id=2356
153# http://scons.tigris.org/issues/show_bug.cgi?id=2611
154# This hack lets us extract the help text from AddOptions and
155# re-inject it via Help(). Ideally someday this bug will be fixed and
156# we can just use AddOption directly.
157def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176AddLocalOption('--with-cxx-config', dest='with_cxx_config',
177 action='store_true',
178 help="Build with support for C++-based configuration")
179AddLocalOption('--default', dest='default', type='string', action='store',
180 help='Override which build_opts file to use for defaults')
181AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
182 help='Disable style checking hooks')
183AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184 help='Disable Link-Time Optimization for fast')
185AddLocalOption('--update-ref', dest='update_ref', action='store_true',
186 help='Update test reference outputs')
187AddLocalOption('--verbose', dest='verbose', action='store_true',
188 help='Print full tool command lines')
189AddLocalOption('--without-python', dest='without_python',
190 action='store_true',
191 help='Build without Python configuration support')
192AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
193 action='store_true',
194 help='Disable linking against tcmalloc')
195AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
196 help='Build with Undefined Behavior Sanitizer if available')
197
198termcap = get_termcap(GetOption('use_colors'))
199
200########################################################################
201#
202# Set up the main build environment.
203#
204########################################################################
205
206# export TERM so that clang reports errors in color
207use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
208 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
209 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
210
211use_prefixes = [
212 "M5", # M5 configuration (e.g., path to kernels)
213 "DISTCC_", # distcc (distributed compiler wrapper) configuration
214 "CCACHE_", # ccache (caching compiler wrapper) configuration
215 "CCC_", # clang static analyzer configuration
216 ]
217
218use_env = {}
219for key,val in sorted(os.environ.iteritems()):
220 if key in use_vars or \
221 any([key.startswith(prefix) for prefix in use_prefixes]):
222 use_env[key] = val
223
224# Tell scons to avoid implicit command dependencies to avoid issues
225# with the param wrappes being compiled twice (see
226# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
227main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
228main.Decider('MD5-timestamp')
229main.root = Dir(".") # The current directory (where this file lives).
230main.srcdir = Dir("src") # The source directory
231
232main_dict_keys = main.Dictionary().keys()
233
234# Check that we have a C/C++ compiler
235if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
236 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
237 Exit(1)
238
239# Check that swig is present
240if not 'SWIG' in main_dict_keys:
241 print "swig is not installed (package swig on Ubuntu and RedHat)"
242 Exit(1)
243
244# add useful python code PYTHONPATH so it can be used by subprocesses
245# as well
246main.AppendENVPath('PYTHONPATH', extra_python_paths)
247
248########################################################################
249#
250# Mercurial Stuff.
251#
252# If the gem5 directory is a mercurial repository, we should do some
253# extra things.
254#
255########################################################################
256
257hgdir = main.root.Dir(".hg")
258
259mercurial_style_message = """
260You're missing the gem5 style hook, which automatically checks your code
261against the gem5 style rules on hg commit and qrefresh commands. This
262script will now install the hook in your .hg/hgrc file.
263Press enter to continue, or ctrl-c to abort: """
264
265mercurial_style_hook = """
266# The following lines were automatically added by gem5/SConstruct
267# to provide the gem5 style-checking hooks
268[extensions]
269style = %s/util/style.py
270
271[hooks]
272pretxncommit.style = python:style.check_style
273pre-qrefresh.style = python:style.check_style
274# End of SConstruct additions
275
276""" % (main.root.abspath)
277
278mercurial_lib_not_found = """
279Mercurial libraries cannot be found, ignoring style hook. If
280you are a gem5 developer, please fix this and run the style
281hook. It is important.
282"""
283
284# Check for style hook and prompt for installation if it's not there.
285# Skip this if --ignore-style was specified, there's no .hg dir to
286# install a hook in, or there's no interactive terminal to prompt.
287if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
288 style_hook = True
289 try:
290 from mercurial import ui
291 ui = ui.ui()
292 ui.readconfig(hgdir.File('hgrc').abspath)
293 style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
294 ui.config('hooks', 'pre-qrefresh.style', None)
295 except ImportError:
296 print mercurial_lib_not_found
297
298 if not style_hook:
299 print mercurial_style_message,
300 # continue unless user does ctrl-c/ctrl-d etc.
301 try:
302 raw_input()
303 except:
304 print "Input exception, exiting scons.\n"
305 sys.exit(1)
306 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
307 print "Adding style hook to", hgrc_path, "\n"
308 try:
309 hgrc = open(hgrc_path, 'a')
310 hgrc.write(mercurial_style_hook)
311 hgrc.close()
312 except:
313 print "Error updating", hgrc_path
314 sys.exit(1)
315
316
317###################################################
318#
319# Figure out which configurations to set up based on the path(s) of
320# the target(s).
321#
322###################################################
323
324# Find default configuration & binary.
325Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
326
327# helper function: find last occurrence of element in list
328def rfind(l, elt, offs = -1):
329 for i in range(len(l)+offs, 0, -1):
330 if l[i] == elt:
331 return i
332 raise ValueError, "element not found"
333
334# Take a list of paths (or SCons Nodes) and return a list with all
335# paths made absolute and ~-expanded. Paths will be interpreted
336# relative to the launch directory unless a different root is provided
337def makePathListAbsolute(path_list, root=GetLaunchDir()):
338 return [abspath(joinpath(root, expanduser(str(p))))
339 for p in path_list]
340
341# Each target must have 'build' in the interior of the path; the
342# directory below this will determine the build parameters. For
343# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
344# recognize that ALPHA_SE specifies the configuration because it
345# follow 'build' in the build path.
346
347# The funky assignment to "[:]" is needed to replace the list contents
348# in place rather than reassign the symbol to a new list, which
349# doesn't work (obviously!).
350BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
351
352# Generate a list of the unique build roots and configs that the
353# collected targets reference.
354variant_paths = []
355build_root = None
356for t in BUILD_TARGETS:
357 path_dirs = t.split('/')
358 try:
359 build_top = rfind(path_dirs, 'build', -2)
360 except:
361 print "Error: no non-leaf 'build' dir found on target path", t
362 Exit(1)
363 this_build_root = joinpath('/',*path_dirs[:build_top+1])
364 if not build_root:
365 build_root = this_build_root
366 else:
367 if this_build_root != build_root:
368 print "Error: build targets not under same build root\n"\
369 " %s\n %s" % (build_root, this_build_root)
370 Exit(1)
371 variant_path = joinpath('/',*path_dirs[:build_top+2])
372 if variant_path not in variant_paths:
373 variant_paths.append(variant_path)
374
375# Make sure build_root exists (might not if this is the first build there)
376if not isdir(build_root):
377 mkdir(build_root)
378main['BUILDROOT'] = build_root
379
380Export('main')
381
382main.SConsignFile(joinpath(build_root, "sconsign"))
383
384# Default duplicate option is to use hard links, but this messes up
385# when you use emacs to edit a file in the target dir, as emacs moves
386# file to file~ then copies to file, breaking the link. Symbolic
387# (soft) links work better.
388main.SetOption('duplicate', 'soft-copy')
389
390#
391# Set up global sticky variables... these are common to an entire build
392# tree (not specific to a particular build like ALPHA_SE)
393#
394
395global_vars_file = joinpath(build_root, 'variables.global')
396
397global_vars = Variables(global_vars_file, args=ARGUMENTS)
398
399global_vars.AddVariables(
400 ('CC', 'C compiler', environ.get('CC', main['CC'])),
401 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
402 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
403 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
404 ('BATCH', 'Use batch pool for build and tests', False),
405 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
406 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
407 ('EXTRAS', 'Add extra directories to the compilation', '')
408 )
409
410# Update main environment with values from ARGUMENTS & global_vars_file
411global_vars.Update(main)
412help_texts["global_vars"] += global_vars.GenerateHelpText(main)
413
414# Save sticky variable settings back to current variables file
415global_vars.Save(global_vars_file, main)
416
417# Parse EXTRAS variable to build list of all directories where we're
418# look for sources etc. This list is exported as extras_dir_list.
419base_dir = main.srcdir.abspath
420if main['EXTRAS']:
421 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
422else:
423 extras_dir_list = []
424
425Export('base_dir')
426Export('extras_dir_list')
427
428# the ext directory should be on the #includes path
429main.Append(CPPPATH=[Dir('ext')])
430
431def strip_build_path(path, env):
432 path = str(path)
433 variant_base = env['BUILDROOT'] + os.path.sep
434 if path.startswith(variant_base):
435 path = path[len(variant_base):]
436 elif path.startswith('build/'):
437 path = path[6:]
438 return path
439
440# Generate a string of the form:
441# common/path/prefix/src1, src2 -> tgt1, tgt2
442# to print while building.
443class Transform(object):
444 # all specific color settings should be here and nowhere else
445 tool_color = termcap.Normal
446 pfx_color = termcap.Yellow
447 srcs_color = termcap.Yellow + termcap.Bold
448 arrow_color = termcap.Blue + termcap.Bold
449 tgts_color = termcap.Yellow + termcap.Bold
450
451 def __init__(self, tool, max_sources=99):
452 self.format = self.tool_color + (" [%8s] " % tool) \
453 + self.pfx_color + "%s" \
454 + self.srcs_color + "%s" \
455 + self.arrow_color + " -> " \
456 + self.tgts_color + "%s" \
457 + termcap.Normal
458 self.max_sources = max_sources
459
460 def __call__(self, target, source, env, for_signature=None):
461 # truncate source list according to max_sources param
462 source = source[0:self.max_sources]
463 def strip(f):
464 return strip_build_path(str(f), env)
465 if len(source) > 0:
466 srcs = map(strip, source)
467 else:
468 srcs = ['']
469 tgts = map(strip, target)
470 # surprisingly, os.path.commonprefix is a dumb char-by-char string
471 # operation that has nothing to do with paths.
472 com_pfx = os.path.commonprefix(srcs + tgts)
473 com_pfx_len = len(com_pfx)
474 if com_pfx:
475 # do some cleanup and sanity checking on common prefix
476 if com_pfx[-1] == ".":
477 # prefix matches all but file extension: ok
478 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
479 com_pfx = com_pfx[0:-1]
480 elif com_pfx[-1] == "/":
481 # common prefix is directory path: OK
482 pass
483 else:
484 src0_len = len(srcs[0])
485 tgt0_len = len(tgts[0])
486 if src0_len == com_pfx_len:
487 # source is a substring of target, OK
488 pass
489 elif tgt0_len == com_pfx_len:
490 # target is a substring of source, need to back up to
491 # avoid empty string on RHS of arrow
492 sep_idx = com_pfx.rfind(".")
493 if sep_idx != -1:
494 com_pfx = com_pfx[0:sep_idx]
495 else:
496 com_pfx = ''
497 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
498 # still splitting at file extension: ok
499 pass
500 else:
501 # probably a fluke; ignore it
502 com_pfx = ''
503 # recalculate length in case com_pfx was modified
504 com_pfx_len = len(com_pfx)
505 def fmt(files):
506 f = map(lambda s: s[com_pfx_len:], files)
507 return ', '.join(f)
508 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
509
510Export('Transform')
511
512# enable the regression script to use the termcap
513main['TERMCAP'] = termcap
514
515if GetOption('verbose'):
516 def MakeAction(action, string, *args, **kwargs):
517 return Action(action, *args, **kwargs)
518else:
519 MakeAction = Action
520 main['CCCOMSTR'] = Transform("CC")
521 main['CXXCOMSTR'] = Transform("CXX")
522 main['ASCOMSTR'] = Transform("AS")
523 main['SWIGCOMSTR'] = Transform("SWIG")
524 main['ARCOMSTR'] = Transform("AR", 0)
525 main['LINKCOMSTR'] = Transform("LINK", 0)
526 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
527 main['M4COMSTR'] = Transform("M4")
528 main['SHCCCOMSTR'] = Transform("SHCC")
529 main['SHCXXCOMSTR'] = Transform("SHCXX")
530Export('MakeAction')
531
532# Initialize the Link-Time Optimization (LTO) flags
533main['LTO_CCFLAGS'] = []
534main['LTO_LDFLAGS'] = []
535
536# According to the readme, tcmalloc works best if the compiler doesn't
537# assume that we're using the builtin malloc and friends. These flags
538# are compiler-specific, so we need to set them after we detect which
539# compiler we're using.
540main['TCMALLOC_CCFLAGS'] = []
541
542CXX_version = readCommand([main['CXX'],'--version'], exception=False)
543CXX_V = readCommand([main['CXX'],'-V'], exception=False)
544
545main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
546main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
547if main['GCC'] + main['CLANG'] > 1:
548 print 'Error: How can we have two at the same time?'
549 Exit(1)
550
551# Set up default C++ compiler flags
552if main['GCC'] or main['CLANG']:
553 # As gcc and clang share many flags, do the common parts here
554 main.Append(CCFLAGS=['-pipe'])
555 main.Append(CCFLAGS=['-fno-strict-aliasing'])
556 # Enable -Wall and then disable the few warnings that we
557 # consistently violate
558 main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
559 # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
560 # actually use that name, so we stick with c++0x
561 main.Append(CXXFLAGS=['-std=c++0x'])
562 # Add selected sanity checks from -Wextra
563 main.Append(CXXFLAGS=['-Wmissing-field-initializers',
564 '-Woverloaded-virtual'])
565else:
566 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
567 print "Don't know what compiler options to use for your compiler."
568 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
569 print termcap.Yellow + ' version:' + termcap.Normal,
570 if not CXX_version:
571 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
572 termcap.Normal
573 else:
574 print CXX_version.replace('\n', '<nl>')
575 print " If you're trying to use a compiler other than GCC"
576 print " or clang, there appears to be something wrong with your"
577 print " environment."
578 print " "
579 print " If you are trying to use a compiler other than those listed"
580 print " above you will need to ease fix SConstruct and "
581 print " src/SConscript to support that compiler."
582 Exit(1)
583
584if main['GCC']:
585 # Check for a supported version of gcc. >= 4.6 is chosen for its
586 # level of c++11 support. See
587 # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
588 # the first version with proper LTO support.
589 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
590 if compareVersions(gcc_version, "4.6") < 0:
591 print 'Error: gcc version 4.6 or newer required.'
592 print ' Installed version:', gcc_version
593 Exit(1)
594
595 main['GCC_VERSION'] = gcc_version
596
597 # gcc from version 4.8 and above generates "rep; ret" instructions
598 # to avoid performance penalties on certain AMD chips. Older
599 # assemblers detect this as an error, "Error: expecting string
600 # instruction after `rep'"
601 if compareVersions(gcc_version, "4.8") > 0:
602 as_version = readCommand([main['AS'], '-v', '/dev/null'],
603 exception=False).split()
604
605 if not as_version or compareVersions(as_version[-1], "2.23") < 0:
606 print termcap.Yellow + termcap.Bold + \
607 'Warning: This combination of gcc and binutils have' + \
608 ' known incompatibilities.\n' + \
609 ' If you encounter build problems, please update ' + \
610 'binutils to 2.23.' + \
611 termcap.Normal
612
613 # Make sure we warn if the user has requested to compile with the
614 # Undefined Benahvior Sanitizer and this version of gcc does not
615 # support it.
616 if GetOption('with_ubsan') and \
617 compareVersions(gcc_version, '4.9') < 0:
618 print termcap.Yellow + termcap.Bold + \
619 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
620 termcap.Normal
621
622 # Add the appropriate Link-Time Optimization (LTO) flags
623 # unless LTO is explicitly turned off. Note that these flags
624 # are only used by the fast target.
625 if not GetOption('no_lto'):
626 # Pass the LTO flag when compiling to produce GIMPLE
627 # output, we merely create the flags here and only append
628 # them later
629 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
630
631 # Use the same amount of jobs for LTO as we are running
632 # scons with
633 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
634
635 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
636 '-fno-builtin-realloc', '-fno-builtin-free'])
637
638elif main['CLANG']:
639 # Check for a supported version of clang, >= 3.0 is needed to
640 # support similar features as gcc 4.6. See
641 # http://clang.llvm.org/cxx_status.html for details
642 clang_version_re = re.compile(".* version (\d+\.\d+)")
643 clang_version_match = clang_version_re.search(CXX_version)
644 if (clang_version_match):
645 clang_version = clang_version_match.groups()[0]
646 if compareVersions(clang_version, "3.0") < 0:
647 print 'Error: clang version 3.0 or newer required.'
648 print ' Installed version:', clang_version
649 Exit(1)
650 else:
651 print 'Error: Unable to determine clang version.'
652 Exit(1)
653
654 # clang has a few additional warnings that we disable,
655 # tautological comparisons are allowed due to unsigned integers
656 # being compared to constants that happen to be 0, and extraneous
657 # parantheses are allowed due to Ruby's printing of the AST,
658 # finally self assignments are allowed as the generated CPU code
659 # is relying on this
660 main.Append(CCFLAGS=['-Wno-tautological-compare',
661 '-Wno-parentheses',
662 '-Wno-self-assign',
663 # Some versions of libstdc++ (4.8?) seem to
664 # use struct hash and class hash
665 # interchangeably.
666 '-Wno-mismatched-tags',
667 ])
668
669 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
670
671 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
672 # opposed to libstdc++, as the later is dated.
673 if sys.platform == "darwin":
674 main.Append(CXXFLAGS=['-stdlib=libc++'])
675 main.Append(LIBS=['c++'])
676
677else:
678 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
679 print "Don't know what compiler options to use for your compiler."
680 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
681 print termcap.Yellow + ' version:' + termcap.Normal,
682 if not CXX_version:
683 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
684 termcap.Normal
685 else:
686 print CXX_version.replace('\n', '<nl>')
687 print " If you're trying to use a compiler other than GCC"
688 print " or clang, there appears to be something wrong with your"
689 print " environment."
690 print " "
691 print " If you are trying to use a compiler other than those listed"
692 print " above you will need to ease fix SConstruct and "
693 print " src/SConscript to support that compiler."
694 Exit(1)
695
696# Set up common yacc/bison flags (needed for Ruby)
697main['YACCFLAGS'] = '-d'
698main['YACCHXXFILESUFFIX'] = '.hh'
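# With '-d', yacc/bison also emits a header of token definitions; the
# suffix override makes SCons name that header '.hh' instead of its
# usual '.hpp' default, matching the naming convention used here.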
699
700# Do this after we save settings back, or else we'll tack on an
701# extra 'qdo' every time we run scons.
702if main['BATCH']:
703 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
704 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
705 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
706 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
707 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
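# e.g. with BATCH_CMD set to 'qdo', CXX becomes something like
# 'qdo g++' so every compile and link runs through the batch command.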
708
709if sys.platform == 'cygwin':
710 # cygwin has some header file issues...
711 main.Append(CCFLAGS=["-Wno-uninitialized"])
712
713# Check for the protobuf compiler
714protoc_version = readCommand([main['PROTOC'], '--version'],
715 exception='').split()
716
717# First two words should be "libprotoc x.y.z"
718if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
719 print termcap.Yellow + termcap.Bold + \
720 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
721 ' Please install protobuf-compiler for tracing support.' + \
722 termcap.Normal
723 main['PROTOC'] = False
724else:
725 # Based on the availability of the compress stream wrappers,
726 # require 2.1.0
727 min_protoc_version = '2.1.0'
728 if compareVersions(protoc_version[1], min_protoc_version) < 0:
729 print termcap.Yellow + termcap.Bold + \
730 'Warning: protoc version', min_protoc_version, \
731 'or newer required.\n' + \
732 ' Installed version:', protoc_version[1], \
733 termcap.Normal
734 main['PROTOC'] = False
735 else:
736 # Attempt to determine the appropriate include path and
737 # library path using pkg-config, which means we also need to
738 # check for pkg-config. Note that it is possible to use
739 # protobuf without the involvement of pkg-config. Later on we
740 # run a library config check, and at that point the test
741 # will fail if libprotobuf cannot be found.
742 if readCommand(['pkg-config', '--version'], exception=''):
743 try:
744 # Attempt to establish what linking flags to add for protobuf
745 # using pkg-config
746 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
747 except:
748 print termcap.Yellow + termcap.Bold + \
749 'Warning: pkg-config could not get protobuf flags.' + \
750 termcap.Normal
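# ParseConfig runs the command and folds the flags it recognizes into
# the environment, so typical '-I<dir> -L<dir>' style output ends up
# in CPPPATH and LIBPATH respectively.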
751
752# Check for SWIG
753if not main.has_key('SWIG'):
754 print 'Error: SWIG utility not found.'
755 print ' Please install (see http://www.swig.org) and retry.'
756 Exit(1)
757
758# Check for appropriate SWIG version
759swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
760# First 3 words should be "SWIG Version x.y.z"
761if len(swig_version) < 3 or \
762 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
763 print 'Error determining SWIG version.'
764 Exit(1)
765
766min_swig_version = '2.0.4'
767if compareVersions(swig_version[2], min_swig_version) < 0:
768 print 'Error: SWIG version', min_swig_version, 'or newer required.'
769 print ' Installed version:', swig_version[2]
770 Exit(1)
771
772# Check for known incompatibilities. The standard library shipped with
773# gcc >= 4.9 does not play well with swig versions prior to 3.0
774if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
775 compareVersions(swig_version[2], '3.0') < 0:
776 print termcap.Yellow + termcap.Bold + \
777 'Warning: This combination of gcc and swig has' + \
778 ' known incompatibilities.\n' + \
779 ' If you encounter build problems, please update ' + \
780 'swig to 3.0 or later.' + \
781 termcap.Normal
782
783# Set up SWIG flags & scanner
784swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
785main.Append(SWIGFLAGS=swig_flags)
786
787# Check for 'timeout' from GNU coreutils. If present, regressions will
788# be run with a time limit. We require version 8.13 since we rely on
789# support for the '--foreground' option.
790timeout_lines = readCommand(['timeout', '--version'],
791 exception='').splitlines()
792# Get the first line and tokenize it
793timeout_version = timeout_lines[0].split() if timeout_lines else []
794main['TIMEOUT'] = timeout_version and \
795 compareVersions(timeout_version[-1], '8.13') >= 0
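# The first line of 'timeout --version' looks something like
# 'timeout (GNU coreutils) 8.21', so the last token above is the
# version number compared against 8.13.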
791
792# filter out all existing swig scanners, they mess up the dependency
793# stuff for some reason
794scanners = []
795for scanner in main['SCANNERS']:
796 skeys = scanner.skeys
797 if skeys == '.i':
798 continue
799
800 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
801 continue
802
803 scanners.append(scanner)
804
805# add the new swig scanner that we like better
806from SCons.Scanner import ClassicCPP as CPPScanner
807swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
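# This pattern matches SWIG directives such as '%include "foo.i"' or
# '#include <bar.hh>' (file names are illustrative), so files pulled
# in by .i interface files are tracked as dependencies.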
808scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
809
810# replace the scanners list that has what we want
811main['SCANNERS'] = scanners
812
813# Add a custom Check function to the Configure context so that we can
814# figure out if the compiler adds leading underscores to global
815# variables. This is needed for the autogenerated asm files that we
816# use for embedding the python code.
817def CheckLeading(context):
818 context.Message("Checking for leading underscore in global variables...")
819 # 1) Define a global variable called x from asm so the C compiler
820 # won't change the symbol at all.
821 # 2) Declare that variable.
822 # 3) Use the variable
823 #
824 # If the compiler prepends an underscore, this will successfully
825 # link because the external symbol 'x' will be called '_x' which
826 # was defined by the asm statement. If the compiler does not
827 # prepend an underscore, this will not successfully link because
828 # '_x' will have been defined by assembly, while the C portion of
829 # the code will be trying to use 'x'
830 ret = context.TryLink('''
831 asm(".globl _x; _x: .byte 0");
832 extern int x;
833 int main() { return x; }
834 ''', extension=".c")
835 context.env.Append(LEADING_UNDERSCORE=ret)
836 context.Result(ret)
837 return ret
838
839# Add a custom Check function to test for structure members.
840def CheckMember(context, include, decl, member, include_quotes="<>"):
841 context.Message("Checking for member %s in %s..." %
842 (member, decl))
843 text = """
844#include %(header)s
845int main(){
846 %(decl)s test;
847 (void)test.%(member)s;
848 return 0;
849};
850""" % { "header" : include_quotes[0] + include + include_quotes[1],
851 "decl" : decl,
852 "member" : member,
853 }
854
855 ret = context.TryCompile(text, extension=".cc")
856 context.Result(ret)
857 return ret
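# This check is used further down; for instance,
# CheckMember('linux/perf_event.h', 'struct perf_event_attr',
# 'exclude_host') compiles a tiny program that merely touches that
# field and reports whether it builds.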
858
859# Platform-specific configuration. Note again that we assume that all
860# builds under a given build root run on the same host platform.
861conf = Configure(main,
862 conf_dir = joinpath(build_root, '.scons_config'),
863 log_file = joinpath(build_root, 'scons_config.log'),
864 custom_tests = {
865 'CheckLeading' : CheckLeading,
866 'CheckMember' : CheckMember,
867 })
868
869# Check for leading underscores. Don't really need to worry either
870# way so don't need to check the return code.
871conf.CheckLeading()
872
873# Check if we should compile a 64 bit binary on Mac OS X/Darwin
874try:
875 import platform
876 uname = platform.uname()
877 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
878 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
879 main.Append(CCFLAGS=['-arch', 'x86_64'])
880 main.Append(CFLAGS=['-arch', 'x86_64'])
881 main.Append(LINKFLAGS=['-arch', 'x86_64'])
882 main.Append(ASFLAGS=['-arch', 'x86_64'])
883except:
884 pass
885
886# Recent versions of scons substitute a "Null" object for Configure()
887# when configuration isn't necessary, e.g., if the "--help" option is
888# present. Unfortunately this Null object always returns false,
889# breaking all our configuration checks. We replace it with our own
890# more optimistic null object that returns True instead.
891if not conf:
892 def NullCheck(*args, **kwargs):
893 return True
894
895 class NullConf:
896 def __init__(self, env):
897 self.env = env
898 def Finish(self):
899 return self.env
900 def __getattr__(self, mname):
901 return NullCheck
902
903 conf = NullConf(main)
904
905# Cache build files in the supplied directory.
906if main['M5_BUILD_CACHE']:
907 print 'Using build cache located at', main['M5_BUILD_CACHE']
908 CacheDir(main['M5_BUILD_CACHE'])
909
910if not GetOption('without_python'):
911 # Find Python include and library directories for embedding the
912 # interpreter. We rely on python-config to resolve the appropriate
913 # includes and linker flags. ParseConfig does not seem to understand
914 # the more exotic linker flags such as -Xlinker and -export-dynamic so
915 # we add them explicitly below. If you want to link in an alternate
916 # version of python, see above for instructions on how to invoke
917 # scons with the appropriate PATH set.
918 #
919 # First we check if python2-config exists, else we use python-config
920 python_config = readCommand(['which', 'python2-config'],
921 exception='').strip()
922 if not os.path.exists(python_config):
923 python_config = readCommand(['which', 'python-config'],
924 exception='').strip()
925 py_includes = readCommand([python_config, '--includes'],
926 exception='').split()
927 # Strip the -I from the include folders before adding them to the
928 # CPPPATH
929 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
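# (python-config --includes typically prints flags of the form
# '-I/usr/include/python2.7', so dropping the first two characters
# leaves bare directory names suitable for CPPPATH.)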
930
931 # Read the linker flags and split them into libraries and other link
932 # flags. The libraries are added later through the call to CheckLib.
933 py_ld_flags = readCommand([python_config, '--ldflags'],
934 exception='').split()
935 py_libs = []
936 for lib in py_ld_flags:
937 if not lib.startswith('-l'):
938 main.Append(LINKFLAGS=[lib])
939 else:
940 lib = lib[2:]
941 if lib not in py_libs:
942 py_libs.append(lib)
943
944 # verify that this stuff works
945 if not conf.CheckHeader('Python.h', '<>'):
946 print "Error: can't find Python.h header in", py_includes
947 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
948 Exit(1)
949
950 for lib in py_libs:
951 if not conf.CheckLib(lib):
952 print "Error: can't find library %s required by python" % lib
953 Exit(1)
954
955# On Solaris you need to use libsocket for socket ops
956if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
957 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
958 print "Can't find library with socket calls (e.g. accept())"
959 Exit(1)
960
961# Check for zlib. If the check passes, libz will be automatically
962# added to the LIBS environment variable.
963if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
964 print 'Error: did not find needed zlib compression library '\
965 'and/or zlib.h header file.'
966 print ' Please install zlib and try again.'
967 Exit(1)
968
969# If we have the protobuf compiler, also make sure we have the
970# development libraries. If the check passes, libprotobuf will be
971# automatically added to the LIBS environment variable. After
972# this, we can use the HAVE_PROTOBUF flag to determine if we have
973# got both protoc and libprotobuf available.
974main['HAVE_PROTOBUF'] = main['PROTOC'] and \
975 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
976 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
977
978# If we have the compiler but not the library, print another warning.
979if main['PROTOC'] and not main['HAVE_PROTOBUF']:
980 print termcap.Yellow + termcap.Bold + \
981 'Warning: did not find protocol buffer library and/or headers.\n' + \
982 ' Please install libprotobuf-dev for tracing support.' + \
983 termcap.Normal
984
985# Check for librt.
986have_posix_clock = \
987 conf.CheckLibWithHeader(None, 'time.h', 'C',
988 'clock_nanosleep(0,0,NULL,NULL);') or \
989 conf.CheckLibWithHeader('rt', 'time.h', 'C',
990 'clock_nanosleep(0,0,NULL,NULL);')
991
992have_posix_timers = \
993 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
994 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
995
996if not GetOption('without_tcmalloc'):
997 if conf.CheckLib('tcmalloc'):
998 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
999 elif conf.CheckLib('tcmalloc_minimal'):
1000 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1001 else:
1002 print termcap.Yellow + termcap.Bold + \
1003 "You can get a 12% performance improvement by "\
1004 "installing tcmalloc (libgoogle-perftools-dev package "\
1005 "on Ubuntu or RedHat)." + termcap.Normal
1006
1007if not have_posix_clock:
1008 print "Can't find library for POSIX clocks."
1009
1010# Check for <fenv.h> (C99 FP environment control)
1011have_fenv = conf.CheckHeader('fenv.h', '<>')
1012if not have_fenv:
1013 print "Warning: Header file <fenv.h> not found."
1014 print " This host has no IEEE FP rounding mode control."
1015
1016# Check if we should enable KVM-based hardware virtualization. The API
1017# we rely on has existed since version 2.6.36 of the kernel, but somehow
1018# the KVM_API_VERSION does not reflect the change. We test for one of
1019# the types as a fall back.
1020have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
1021 conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
1022if not have_kvm:
1023 print "Info: Compatible header file <linux/kvm.h> not found, " \
1024 "disabling KVM support."
1025
1026# Check if the requested target ISA is compatible with the host
1027def is_isa_kvm_compatible(isa):
1028 isa_comp_table = {
1029 "arm" : ( "armv7l" ),
1030 "x86" : ( "x86_64" ),
1031 }
1032 try:
1033 import platform
1034 host_isa = platform.machine()
1035 except:
1036 print "Warning: Failed to determine host ISA."
1037 return False
1038
1039 return host_isa in isa_comp_table.get(isa, [])
1040
1041
1042# Check if the exclude_host attribute is available. We want this to
1043# get accurate instruction counts in KVM.
1044main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1045 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1046
1047
1048######################################################################
1049#
1050# Finish the configuration
1051#
1052main = conf.Finish()
1053
1054######################################################################
1055#
1056# Collect all non-global variables
1057#
1058
1059# Define the universe of supported ISAs
1060all_isa_list = [ ]
1061Export('all_isa_list')
1062
1063class CpuModel(object):
1064 '''The CpuModel class encapsulates everything the ISA parser needs to
1065 know about a particular CPU model.'''
1066
1067 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1068 dict = {}
1069
1070 # Constructor. Automatically adds models to CpuModel.dict.
1071 def __init__(self, name, default=False):
1072 self.name = name # name of model
1073
1074 # This cpu is enabled by default
1075 self.default = default
1076
1077 # Add self to dict
1078 if name in CpuModel.dict:
1079 raise AttributeError, "CpuModel '%s' already registered" % name
1080 CpuModel.dict[name] = self
1081
1082Export('CpuModel')
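# CPU model SConscripts/SConsopts register themselves through calls
# along the lines of CpuModel('AtomicSimpleCPU', default=True); the
# model name here is only an illustration.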
1083
1084# Sticky variables get saved in the variables file so they persist from
1085# one invocation to the next (unless overridden, in which case the new
1086# value becomes sticky).
1087sticky_vars = Variables(args=ARGUMENTS)
1088Export('sticky_vars')
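# For example, 'scons build/X86/gem5.opt USE_KVM=False' would record
# USE_KVM=False for that build directory and keep using it on later
# invocations until it is overridden again.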
1089
1090# Sticky variables that should be exported
1091export_vars = []
1092Export('export_vars')
1093
1094# For Ruby
1095all_protocols = []
1096Export('all_protocols')
1097protocol_dirs = []
1098Export('protocol_dirs')
1099slicc_includes = []
1100Export('slicc_includes')
1101
1102# Walk the tree and execute all SConsopts scripts that will add to the
1103# above variables
1104if GetOption('verbose'):
1105 print "Reading SConsopts"
1106for bdir in [ base_dir ] + extras_dir_list:
1107 if not isdir(bdir):
1108 print "Error: directory '%s' does not exist" % bdir
1109 Exit(1)
1110 for root, dirs, files in os.walk(bdir):
1111 if 'SConsopts' in files:
1112 if GetOption('verbose'):
1113 print "Reading", joinpath(root, 'SConsopts')
1114 SConscript(joinpath(root, 'SConsopts'))
1115
1116all_isa_list.sort()
1117
1118sticky_vars.AddVariables(
1119 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1120 ListVariable('CPU_MODELS', 'CPU models',
1121 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1122 sorted(CpuModel.dict.keys())),
1123 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1124 False),
1125 BoolVariable('SS_COMPATIBLE_FP',
1126 'Make floating-point results compatible with SimpleScalar',
1127 False),
1128 BoolVariable('USE_SSE2',
1129 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1130 False),
1131 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1132 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1133 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1134 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1135 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1136 all_protocols),
1137 )
1138
1139# These variables get exported to #defines in config/*.hh (see src/SConscript).
1140export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
1141 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL', 'HAVE_PROTOBUF',
1142 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1143
1144###################################################
1145#
1146# Define a SCons builder for configuration flag headers.
1147#
1148###################################################
1149
1150# This function generates a config header file that #defines the
1151# variable symbol to the current variable setting (0 or 1). The source
1152# operands are the name of the variable and a Value node containing the
1153# value of the variable.
1154def build_config_file(target, source, env):
1155 (variable, value) = [s.get_contents() for s in source]
1156 f = file(str(target[0]), 'w')
1157 print >> f, '#define', variable, value
1158 f.close()
1159 return None
1160
1161# Combine the two functions into a scons Action object.
1162config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1163
1164# The emitter munges the source & target node lists to reflect what
1165# we're really doing.
1166def config_emitter(target, source, env):
1167 # extract variable name from Builder arg
1168 variable = str(target[0])
1169 # True target is config header file
1170 target = joinpath('config', variable.lower() + '.hh')
1171 val = env[variable]
1172 if isinstance(val, bool):
1173 # Force value to 0/1
1174 val = int(val)
1175 elif isinstance(val, str):
1176 val = '"' + val + '"'
1177
1178 # Sources are variable name & value (packaged in SCons Value nodes)
1179 return ([target], [Value(variable), Value(val)])
1180
1181config_builder = Builder(emitter = config_emitter, action = config_action)
1182
1183main.Append(BUILDERS = { 'ConfigFile' : config_builder })
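# With this builder in place, env.ConfigFile('USE_KVM') would emit
# config/use_kvm.hh containing a single '#define USE_KVM 1' (or 0)
# line; this is how the exported variables above reach the C++ code.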
1184
1185# libelf build is shared across all configs in the build root.
1186main.SConscript('ext/libelf/SConscript',
1187 variant_dir = joinpath(build_root, 'libelf'))
1188
1189# gzstream build is shared across all configs in the build root.
1190main.SConscript('ext/gzstream/SConscript',
1191 variant_dir = joinpath(build_root, 'gzstream'))
1192
1193# libfdt build is shared across all configs in the build root.
1194main.SConscript('ext/libfdt/SConscript',
1195 variant_dir = joinpath(build_root, 'libfdt'))
1196
1197# fputils build is shared across all configs in the build root.
1198main.SConscript('ext/fputils/SConscript',
1199 variant_dir = joinpath(build_root, 'fputils'))
1200
1201# DRAMSim2 build is shared across all configs in the build root.
1202main.SConscript('ext/dramsim2/SConscript',
1203 variant_dir = joinpath(build_root, 'dramsim2'))
1204
1205# DRAMPower build is shared across all configs in the build root.
1206main.SConscript('ext/drampower/SConscript',
1207 variant_dir = joinpath(build_root, 'drampower'))
1208
1209###################################################
1210#
1211# This function is used to set up a directory with switching headers
1212#
1213###################################################
1214
1215main['ALL_ISA_LIST'] = all_isa_list
1216all_isa_deps = {}
1217def make_switching_dir(dname, switch_headers, env):
1218 # Generate the header. target[0] is the full path of the output
1219 # header to generate. 'source' is a dummy variable, since we get the
1220 # list of ISAs from env['ALL_ISA_LIST'].
1221 def gen_switch_hdr(target, source, env):
1222 fname = str(target[0])
1223 isa = env['TARGET_ISA'].lower()
1224 try:
1225 f = open(fname, 'w')
1226 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1227 f.close()
1228 except IOError:
1229 print "Failed to create %s" % fname
1230 raise
1231
1232 # Build SCons Action object. 'varlist' specifies env vars that this
1233 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1234 # should get re-executed.
1235 switch_hdr_action = MakeAction(gen_switch_hdr,
1236 Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1237
1238 # Instantiate actions for each header
1239 for hdr in switch_headers:
1240 env.Command(hdr, [], switch_hdr_action)
1241
1242 isa_target = Dir('.').up().name.lower().replace('_', '-')
1243 env['PHONY_BASE'] = '#'+isa_target
1244 all_isa_deps[isa_target] = None
1245
1246Export('make_switching_dir')
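# As an example, for a dname of 'arch' and a TARGET_ISA of 'arm', a
# switching header named isa_traits.hh generated here would contain
# just '#include "arch/arm/isa_traits.hh"'.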
1247
1248# all-isas -> all-deps -> all-environs -> all-targets
1249main.Alias('#all-isas', [])
1250main.Alias('#all-deps', '#all-isas')
1251
1252# Dummy target to ensure all environments are created before telling
1253# SCons what to actually make (the command line arguments). We attach
1254# them to the dependence graph after the environments are complete.
1255ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1256def environsComplete(target, source, env):
1257 for t in ORIG_BUILD_TARGETS:
1258 main.Depends('#all-targets', t)
1259
1260# Each build/* switching_dir attaches its *-environs target to #all-environs.
1261main.Append(BUILDERS = {'CompleteEnvirons' :
1262 Builder(action=MakeAction(environsComplete, None))})
1263main.CompleteEnvirons('#all-environs', [])
1264
1265def doNothing(**ignored): pass
1266main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1267
1268# The final target to which all the original targets ultimately get attached.
1269main.Dummy('#all-targets', '#all-environs')
1270BUILD_TARGETS[:] = ['#all-targets']
1271
1272###################################################
1273#
1274# Define build environments for selected configurations.
1275#
1276###################################################
1277
1278for variant_path in variant_paths:
1279 if not GetOption('silent'):
1280 print "Building in", variant_path
1281
1282 # Make a copy of the build-root environment to use for this config.
1283 env = main.Clone()
1284 env['BUILDDIR'] = variant_path
1285
1286 # variant_dir is the tail component of build path, and is used to
1287 # determine the build parameters (e.g., 'ALPHA_SE')
1288 (build_root, variant_dir) = splitpath(variant_path)
1289
1290 # Set env variables according to the build directory config.
1291 sticky_vars.files = []
1292 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1293 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1294 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1295 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1296 if isfile(current_vars_file):
1297 sticky_vars.files.append(current_vars_file)
1298 if not GetOption('silent'):
1299 print "Using saved variables file %s" % current_vars_file
1300 else:
1301 # Build dir-specific variables file doesn't exist.
1302
1303 # Make sure the directory is there so we can create it later
1304 opt_dir = dirname(current_vars_file)
1305 if not isdir(opt_dir):
1306 mkdir(opt_dir)
1307
1308 # Get default build variables from source tree. Variables are
1309 # normally determined by name of $VARIANT_DIR, but can be
1310 # overridden by '--default=' arg on command line.
1311 default = GetOption('default')
1312 opts_dir = joinpath(main.root.abspath, 'build_opts')
1313 if default:
1314 default_vars_files = [joinpath(build_root, 'variables', default),
1315 joinpath(opts_dir, default)]
1316 else:
1317 default_vars_files = [joinpath(opts_dir, variant_dir)]
1318 existing_files = filter(isfile, default_vars_files)
1319 if existing_files:
1320 default_vars_file = existing_files[0]
1321 sticky_vars.files.append(default_vars_file)
1322 print "Variables file %s not found,\n using defaults in %s" \
1323 % (current_vars_file, default_vars_file)
1324 else:
1325 print "Error: cannot find variables file %s or " \
1326 "default file(s) %s" \
1327 % (current_vars_file, ' or '.join(default_vars_files))
1328 Exit(1)
1329
1330 # Apply current variable settings to env
1331 sticky_vars.Update(env)
1332
1333 help_texts["local_vars"] += \
1334 "Build variables for %s:\n" % variant_dir \
1335 + sticky_vars.GenerateHelpText(env)
1336
1337 # Process variable settings.
1338
1339 if not have_fenv and env['USE_FENV']:
1340 print "Warning: <fenv.h> not available; " \
1341 "forcing USE_FENV to False in", variant_dir + "."
1342 env['USE_FENV'] = False
1343
1344 if not env['USE_FENV']:
1345 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1346 print " FP results may deviate slightly from other platforms."
1347
1348 if env['EFENCE']:
1349 env.Append(LIBS=['efence'])
1350
1351 if env['USE_KVM']:
1352 if not have_kvm:
1353 print "Warning: Can not enable KVM, host seems to lack KVM support"
1354 env['USE_KVM'] = False
1355 elif not have_posix_timers:
1356 print "Warning: Can not enable KVM, host seems to lack support " \
1357 "for POSIX timers"
1358 env['USE_KVM'] = False
1359 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1360 print "Info: KVM support disabled due to unsupported host and " \
1361 "target ISA combination"
1362 env['USE_KVM'] = False
1363
1364 # Warn about missing optional functionality
1365 if env['USE_KVM']:
1366 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1367 print "Warning: perf_event headers lack support for the " \
1368 "exclude_host attribute. KVM instruction counts will " \
1369 "be inaccurate."
1370
1371 # Save sticky variable settings back to current variables file
1372 sticky_vars.Save(current_vars_file, env)
1373
1374 if env['USE_SSE2']:
1375 env.Append(CCFLAGS=['-msse2'])
1376
1377 # The src/SConscript file sets up the build rules in 'env' according
1378 # to the configured variables. It returns a list of environments,
1379 # one for each variant build (debug, opt, etc.)
1380 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1381
1382def pairwise(iterable):
1383 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1384 a, b = itertools.tee(iterable)
1385 b.next()
1386 return itertools.izip(a, b)
1387
1388# Create false dependencies so SCons will parse ISAs, establish
1389# dependencies, and set up the build Environments serially. Either
1390# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1391# greater than 1. It appears to be standard race condition stuff; it
1392# doesn't always fail, but usually, and the behaviors are different.
1393# Every time I tried to remove this, builds would fail in some
1394# creative new way. So, don't do that. You'll want to, though, because
1395# tests/SConscript takes a long time to make its Environments.
1396for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1397 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1398 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1399
1400# base help text
1401Help('''
1402Usage: scons [scons options] [build variables] [target(s)]
1403
1404Extra scons options:
1405%(options)s
1406
1407Global build variables:
1408%(global_vars)s
1409
1410%(local_vars)s
1411''' % help_texts)