SConstruct
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85     # revision; the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90 print """
91For more details, see:
92 http://gem5.org/Dependencies
93"""
94 raise
95
96# We ensure the python version early because python-config
97# requires python 2.5
98try:
99 EnsurePythonVersion(2, 5)
100except SystemExit, e:
101 print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109 raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import subprocess
116import sys
117
118from os import mkdir, environ
119from os.path import abspath, basename, dirname, expanduser, normpath
120from os.path import exists, isdir, isfile
121from os.path import join as joinpath, split as splitpath
122
123# SCons includes
124import SCons
125import SCons.Node
126
127extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132sys.path[1:1] = extra_python_paths
133
134from m5.util import compareVersions, readCommand
135from m5.util.terminal import get_termcap
136
137help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141}
142
143Export("help_texts")
144
145
146# There's a bug in scons in that (1) by default, the help texts from
147# AddOption() are supposed to be displayed when you type 'scons -h'
148# and (2) you can override the help displayed by 'scons -h' using the
149# Help() function, but these two features are incompatible: once
150# you've overridden the help text using Help(), there's no way to get
151# at the help texts from AddOptions. See:
152# http://scons.tigris.org/issues/show_bug.cgi?id=2356
153# http://scons.tigris.org/issues/show_bug.cgi?id=2611
154# This hack lets us extract the help text from AddOptions and
155# re-inject it via Help(). Ideally someday this bug will be fixed and
156# we can just use AddOption directly.
157def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176AddLocalOption('--with-cxx-config', dest='with_cxx_config',
177 action='store_true',
178 help="Build with support for C++-based configuration")
179AddLocalOption('--default', dest='default', type='string', action='store',
180 help='Override which build_opts file to use for defaults')
181AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
182 help='Disable style checking hooks')
183AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184 help='Disable Link-Time Optimization for fast')
185AddLocalOption('--update-ref', dest='update_ref', action='store_true',
186 help='Update test reference outputs')
187AddLocalOption('--verbose', dest='verbose', action='store_true',
188 help='Print full tool command lines')
189AddLocalOption('--without-python', dest='without_python',
190 action='store_true',
191 help='Build without Python configuration support')
192AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
193 action='store_true',
194 help='Disable linking against tcmalloc')
195AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
196 help='Build with Undefined Behavior Sanitizer if available')
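# The options above combine with ordinary scons targets; for example
# (illustrative): scons --verbose --no-lto --without-tcmalloc build/ALPHA/gem5.opt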
197
198termcap = get_termcap(GetOption('use_colors'))
199
200########################################################################
201#
202# Set up the main build environment.
203#
204########################################################################
205
206# export TERM so that clang reports errors in color
207use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
208 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
209 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
210
211use_prefixes = [
212 "M5", # M5 configuration (e.g., path to kernels)
213 "DISTCC_", # distcc (distributed compiler wrapper) configuration
214 "CCACHE_", # ccache (caching compiler wrapper) configuration
215 "CCC_", # clang static analyzer configuration
216 ]
217
218use_env = {}
219for key,val in sorted(os.environ.iteritems()):
220 if key in use_vars or \
221 any([key.startswith(prefix) for prefix in use_prefixes]):
222 use_env[key] = val
223
224# Tell scons to avoid implicit command dependencies to avoid issues
225# with the param wrappers being compiled twice (see
226# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
227main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
228main.Decider('MD5-timestamp')
229main.root = Dir(".") # The current directory (where this file lives).
230main.srcdir = Dir("src") # The source directory
231
232main_dict_keys = main.Dictionary().keys()
233
234# Check that we have a C/C++ compiler
235if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
236 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
237 Exit(1)
238
239# Check that swig is present
240if not 'SWIG' in main_dict_keys:
241 print "swig is not installed (package swig on Ubuntu and RedHat)"
242 Exit(1)
243
244# add useful python code to PYTHONPATH so it can be used by subprocesses
245# as well
246main.AppendENVPath('PYTHONPATH', extra_python_paths)
247
248########################################################################
249#
250# Mercurial Stuff.
251#
252# If the gem5 directory is a mercurial repository, we should do some
253# extra things.
254#
255########################################################################
256
257hgdir = main.root.Dir(".hg")
258
259mercurial_style_message = """
260You're missing the gem5 style hook, which automatically checks your code
261against the gem5 style rules on hg commit and qrefresh commands. This
262script will now install the hook in your .hg/hgrc file.
263Press enter to continue, or ctrl-c to abort: """
264
265mercurial_style_hook = """
266# The following lines were automatically added by gem5/SConstruct
267# to provide the gem5 style-checking hooks
268[extensions]
269style = %s/util/style.py
270
271[hooks]
272pretxncommit.style = python:style.check_style
273pre-qrefresh.style = python:style.check_style
274# End of SConstruct additions
275
276""" % (main.root.abspath)
277
278mercurial_lib_not_found = """
279Mercurial libraries cannot be found, ignoring style hook. If
280you are a gem5 developer, please fix this and run the style
281hook. It is important.
282"""
283
284# Check for style hook and prompt for installation if it's not there.
285# Skip this if --ignore-style was specified, there's no .hg dir to
286# install a hook in, or there's no interactive terminal to prompt.
287if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
288 style_hook = True
289 try:
290 from mercurial import ui
291 ui = ui.ui()
292 ui.readconfig(hgdir.File('hgrc').abspath)
293 style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
294 ui.config('hooks', 'pre-qrefresh.style', None)
295 except ImportError:
296 print mercurial_lib_not_found
297
298 if not style_hook:
299 print mercurial_style_message,
300 # continue unless user does ctrl-c/ctrl-d etc.
301 try:
302 raw_input()
303 except:
304 print "Input exception, exiting scons.\n"
305 sys.exit(1)
306 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
307 print "Adding style hook to", hgrc_path, "\n"
308 try:
309 hgrc = open(hgrc_path, 'a')
310 hgrc.write(mercurial_style_hook)
311 hgrc.close()
312 except:
313 print "Error updating", hgrc_path
314 sys.exit(1)
315
316
317###################################################
318#
319# Figure out which configurations to set up based on the path(s) of
320# the target(s).
321#
322###################################################
323
324# Find default configuration & binary.
325Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
326
327# helper function: find last occurrence of element in list
328def rfind(l, elt, offs = -1):
329 for i in range(len(l)+offs, 0, -1):
330 if l[i] == elt:
331 return i
332 raise ValueError, "element not found"
333
334# Take a list of paths (or SCons Nodes) and return a list with all
335# paths made absolute and ~-expanded. Paths will be interpreted
336# relative to the launch directory unless a different root is provided
337def makePathListAbsolute(path_list, root=GetLaunchDir()):
338 return [abspath(joinpath(root, expanduser(str(p))))
339 for p in path_list]
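# For example (illustrative), with a launch directory of /work/gem5:
# makePathListAbsolute(['build/ALPHA/gem5.debug', '~/extras'])
# -> ['/work/gem5/build/ALPHA/gem5.debug', '/home/<user>/extras']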
340
341# Each target must have 'build' in the interior of the path; the
342# directory below this will determine the build parameters. For
343# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
344# recognize that ALPHA_SE specifies the configuration because it
345# follows 'build' in the build path.
346
347# The funky assignment to "[:]" is needed to replace the list contents
348# in place rather than reassign the symbol to a new list, which
349# doesn't work (obviously!).
350BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
351
352# Generate a list of the unique build roots and configs that the
353# collected targets reference.
354variant_paths = []
355build_root = None
356for t in BUILD_TARGETS:
357 path_dirs = t.split('/')
358 try:
359 build_top = rfind(path_dirs, 'build', -2)
360 except:
361 print "Error: no non-leaf 'build' dir found on target path", t
362 Exit(1)
363 this_build_root = joinpath('/',*path_dirs[:build_top+1])
364 if not build_root:
365 build_root = this_build_root
366 else:
367 if this_build_root != build_root:
368 print "Error: build targets not under same build root\n"\
369 " %s\n %s" % (build_root, this_build_root)
370 Exit(1)
371 variant_path = joinpath('/',*path_dirs[:build_top+2])
372 if variant_path not in variant_paths:
373 variant_paths.append(variant_path)
374
375# Make sure build_root exists (might not if this is the first build there)
376if not isdir(build_root):
377 mkdir(build_root)
378main['BUILDROOT'] = build_root
379
380Export('main')
381
382main.SConsignFile(joinpath(build_root, "sconsign"))
383
384# Default duplicate option is to use hard links, but this messes up
385# when you use emacs to edit a file in the target dir, as emacs moves
386# file to file~ then copies to file, breaking the link. Symbolic
387# (soft) links work better.
388main.SetOption('duplicate', 'soft-copy')
389
390#
391# Set up global sticky variables... these are common to an entire build
392# tree (not specific to a particular build like ALPHA_SE)
393#
394
395global_vars_file = joinpath(build_root, 'variables.global')
396
397global_vars = Variables(global_vars_file, args=ARGUMENTS)
398
399global_vars.AddVariables(
400 ('CC', 'C compiler', environ.get('CC', main['CC'])),
401 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
402 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
403 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
404 ('BATCH', 'Use batch pool for build and tests', False),
405 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
406 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
407 ('EXTRAS', 'Add extra directories to the compilation', '')
408 )
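# These sticky variables can be set on the scons command line, for example
# (illustrative): scons CC=gcc CXX=g++ EXTRAS=/path/to/extras build/ALPHA/gem5.opt
# Values given this way are written to <build_root>/variables.global below
# and reused by later invocations.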
409
410# Update main environment with values from ARGUMENTS & global_vars_file
411global_vars.Update(main)
412help_texts["global_vars"] += global_vars.GenerateHelpText(main)
413
414# Save sticky variable settings back to current variables file
415global_vars.Save(global_vars_file, main)
416
417# Parse EXTRAS variable to build a list of all directories where we'll
418# look for sources etc. This list is exported as extras_dir_list.
419base_dir = main.srcdir.abspath
420if main['EXTRAS']:
421 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
422else:
423 extras_dir_list = []
424
425Export('base_dir')
426Export('extras_dir_list')
427
428# the ext directory should be on the #include path
429main.Append(CPPPATH=[Dir('ext')])
430
431def strip_build_path(path, env):
432 path = str(path)
433 variant_base = env['BUILDROOT'] + os.path.sep
434 if path.startswith(variant_base):
435 path = path[len(variant_base):]
436 elif path.startswith('build/'):
437 path = path[6:]
438 return path
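# For example (illustrative):
# strip_build_path('build/ALPHA/base/trace.cc', env) -> 'ALPHA/base/trace.cc'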
439
440# Generate a string of the form:
441# common/path/prefix/src1, src2 -> tgt1, tgt2
442# to print while building.
443class Transform(object):
444 # all specific color settings should be here and nowhere else
445 tool_color = termcap.Normal
446 pfx_color = termcap.Yellow
447 srcs_color = termcap.Yellow + termcap.Bold
448 arrow_color = termcap.Blue + termcap.Bold
449 tgts_color = termcap.Yellow + termcap.Bold
450
451 def __init__(self, tool, max_sources=99):
452 self.format = self.tool_color + (" [%8s] " % tool) \
453 + self.pfx_color + "%s" \
454 + self.srcs_color + "%s" \
455 + self.arrow_color + " -> " \
456 + self.tgts_color + "%s" \
457 + termcap.Normal
458 self.max_sources = max_sources
459
460 def __call__(self, target, source, env, for_signature=None):
461 # truncate source list according to max_sources param
462 source = source[0:self.max_sources]
463 def strip(f):
464 return strip_build_path(str(f), env)
465 if len(source) > 0:
466 srcs = map(strip, source)
467 else:
468 srcs = ['']
469 tgts = map(strip, target)
470 # surprisingly, os.path.commonprefix is a dumb char-by-char string
471 # operation that has nothing to do with paths.
472 com_pfx = os.path.commonprefix(srcs + tgts)
473 com_pfx_len = len(com_pfx)
474 if com_pfx:
475 # do some cleanup and sanity checking on common prefix
476 if com_pfx[-1] == ".":
477 # prefix matches all but file extension: ok
478 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
479 com_pfx = com_pfx[0:-1]
480 elif com_pfx[-1] == "/":
481 # common prefix is directory path: OK
482 pass
483 else:
484 src0_len = len(srcs[0])
485 tgt0_len = len(tgts[0])
486 if src0_len == com_pfx_len:
487 # source is a substring of target, OK
488 pass
489 elif tgt0_len == com_pfx_len:
490 # target is a substring of source, need to back up to
491 # avoid empty string on RHS of arrow
492 sep_idx = com_pfx.rfind(".")
493 if sep_idx != -1:
494 com_pfx = com_pfx[0:sep_idx]
495 else:
496 com_pfx = ''
497 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
498 # still splitting at file extension: ok
499 pass
500 else:
501 # probably a fluke; ignore it
502 com_pfx = ''
503 # recalculate length in case com_pfx was modified
504 com_pfx_len = len(com_pfx)
505 def fmt(files):
506 f = map(lambda s: s[com_pfx_len:], files)
507 return ', '.join(f)
508 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
509
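# A Transform instance produces the abbreviated build lines printed during
# compilation, for example (illustrative, colors omitted):
#  [     CXX] ALPHA/base/trace.cc -> .o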
510Export('Transform')
511
512# enable the regression script to use the termcap
513main['TERMCAP'] = termcap
514
515if GetOption('verbose'):
516 def MakeAction(action, string, *args, **kwargs):
517 return Action(action, *args, **kwargs)
518else:
519 MakeAction = Action
520 main['CCCOMSTR'] = Transform("CC")
521 main['CXXCOMSTR'] = Transform("CXX")
522 main['ASCOMSTR'] = Transform("AS")
523 main['SWIGCOMSTR'] = Transform("SWIG")
524 main['ARCOMSTR'] = Transform("AR", 0)
525 main['LINKCOMSTR'] = Transform("LINK", 0)
526 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
527 main['M4COMSTR'] = Transform("M4")
528 main['SHCCCOMSTR'] = Transform("SHCC")
529 main['SHCXXCOMSTR'] = Transform("SHCXX")
530Export('MakeAction')
531
532# Initialize the Link-Time Optimization (LTO) flags
533main['LTO_CCFLAGS'] = []
534main['LTO_LDFLAGS'] = []
535
536# According to the readme, tcmalloc works best if the compiler doesn't
537# assume that we're using the builtin malloc and friends. These flags
538# are compiler-specific, so we need to set them after we detect which
539# compiler we're using.
540main['TCMALLOC_CCFLAGS'] = []
541
542CXX_version = readCommand([main['CXX'],'--version'], exception=False)
543CXX_V = readCommand([main['CXX'],'-V'], exception=False)
544
545main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
546main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
547if main['GCC'] + main['CLANG'] > 1:
548 print 'Error: How can we have two at the same time?'
549 Exit(1)
550
551# Set up default C++ compiler flags
552if main['GCC'] or main['CLANG']:
553 # As gcc and clang share many flags, do the common parts here
554 main.Append(CCFLAGS=['-pipe'])
555 main.Append(CCFLAGS=['-fno-strict-aliasing'])
556 # Enable -Wall and then disable the few warnings that we
557 # consistently violate
558 main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
559     # We always compile using C++11, but only gcc >= 4.7 and clang >= 3.1
560 # actually use that name, so we stick with c++0x
561 main.Append(CXXFLAGS=['-std=c++0x'])
562 # Add selected sanity checks from -Wextra
563 main.Append(CXXFLAGS=['-Wmissing-field-initializers',
564 '-Woverloaded-virtual'])
565else:
566 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
567 print "Don't know what compiler options to use for your compiler."
568 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
569 print termcap.Yellow + ' version:' + termcap.Normal,
570 if not CXX_version:
571 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
572 termcap.Normal
573 else:
574 print CXX_version.replace('\n', '<nl>')
575 print " If you're trying to use a compiler other than GCC"
576 print " or clang, there appears to be something wrong with your"
577 print " environment."
578 print " "
579 print " If you are trying to use a compiler other than those listed"
580     print "      above you will need to fix SConstruct and "
581 print " src/SConscript to support that compiler."
582 Exit(1)
583
584if main['GCC']:
585 # Check for a supported version of gcc. >= 4.6 is chosen for its
586 # level of c++11 support. See
587 # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
588 # the first version with proper LTO support.
589 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
590 if compareVersions(gcc_version, "4.6") < 0:
591 print 'Error: gcc version 4.6 or newer required.'
592 print ' Installed version:', gcc_version
593 Exit(1)
594
595 main['GCC_VERSION'] = gcc_version
596
597 # gcc from version 4.8 and above generates "rep; ret" instructions
598 # to avoid performance penalties on certain AMD chips. Older
599 # assemblers detect this as an error, "Error: expecting string
600 # instruction after `rep'"
601 if compareVersions(gcc_version, "4.8") > 0:
602 as_version = readCommand([main['AS'], '-v', '/dev/null'],
603 exception=False).split()
604
605 if not as_version or compareVersions(as_version[-1], "2.23") < 0:
606 print termcap.Yellow + termcap.Bold + \
607           'Warning: This combination of gcc and binutils has' + \
608 ' known incompatibilities.\n' + \
609 ' If you encounter build problems, please update ' + \
610 'binutils to 2.23.' + \
611 termcap.Normal
612
613 # Make sure we warn if the user has requested to compile with the
614     # Undefined Behavior Sanitizer and this version of gcc does not
615 # support it.
616 if GetOption('with_ubsan') and \
617 compareVersions(gcc_version, '4.9') < 0:
618 print termcap.Yellow + termcap.Bold + \
619 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
620 termcap.Normal
621
622 # Add the appropriate Link-Time Optimization (LTO) flags
623 # unless LTO is explicitly turned off. Note that these flags
624 # are only used by the fast target.
625 if not GetOption('no_lto'):
626 # Pass the LTO flag when compiling to produce GIMPLE
627 # output, we merely create the flags here and only append
628 # them later
629 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
630
631 # Use the same amount of jobs for LTO as we are running
632 # scons with
633 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
634
635 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
636 '-fno-builtin-realloc', '-fno-builtin-free'])
637
638elif main['CLANG']:
639     # Check for a supported version of clang; >= 3.0 is needed to
640     # support features similar to gcc 4.6. See
641 # http://clang.llvm.org/cxx_status.html for details
642 clang_version_re = re.compile(".* version (\d+\.\d+)")
643 clang_version_match = clang_version_re.search(CXX_version)
644 if (clang_version_match):
645 clang_version = clang_version_match.groups()[0]
646 if compareVersions(clang_version, "3.0") < 0:
647 print 'Error: clang version 3.0 or newer required.'
648 print ' Installed version:', clang_version
649 Exit(1)
650 else:
651 print 'Error: Unable to determine clang version.'
652 Exit(1)
653
654 # clang has a few additional warnings that we disable,
655 # tautological comparisons are allowed due to unsigned integers
656 # being compared to constants that happen to be 0, and extraneous
657     # parentheses are allowed due to Ruby's printing of the AST,
658     # finally, self-assignments are allowed as the generated CPU code
659 # is relying on this
660 main.Append(CCFLAGS=['-Wno-tautological-compare',
661 '-Wno-parentheses',
662 '-Wno-self-assign',
663 # Some versions of libstdc++ (4.8?) seem to
664 # use struct hash and class hash
665 # interchangeably.
666 '-Wno-mismatched-tags',
667 ])
668
669 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
670
671 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
672     # opposed to libstdc++, as the latter is dated.
673 if sys.platform == "darwin":
674 main.Append(CXXFLAGS=['-stdlib=libc++'])
675 main.Append(LIBS=['c++'])
676
677else:
678 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
679 print "Don't know what compiler options to use for your compiler."
680 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
681 print termcap.Yellow + ' version:' + termcap.Normal,
682 if not CXX_version:
683 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
684 termcap.Normal
685 else:
686 print CXX_version.replace('\n', '<nl>')
687 print " If you're trying to use a compiler other than GCC"
688 print " or clang, there appears to be something wrong with your"
689 print " environment."
690 print " "
691 print " If you are trying to use a compiler other than those listed"
692     print "      above you will need to fix SConstruct and "
693 print " src/SConscript to support that compiler."
694 Exit(1)
695
696# Set up common yacc/bison flags (needed for Ruby)
697main['YACCFLAGS'] = '-d'
698main['YACCHXXFILESUFFIX'] = '.hh'
699
700# Do this after we save settings back, or else we'll tack on an
701# extra 'qdo' every time we run scons.
702if main['BATCH']:
703 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
704 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
705 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
706 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
707 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
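# For example (illustrative), building with the sticky variables
# BATCH=True BATCH_CMD=qdo prefixes every compile, assemble, archive and
# link command with 'qdo' so it runs on the batch pool.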
708
709if sys.platform == 'cygwin':
710 # cygwin has some header file issues...
711 main.Append(CCFLAGS=["-Wno-uninitialized"])
712
713# Check for the protobuf compiler
714protoc_version = readCommand([main['PROTOC'], '--version'],
715 exception='').split()
716
717# First two words should be "libprotoc x.y.z"
718if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
719 print termcap.Yellow + termcap.Bold + \
720 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
721 ' Please install protobuf-compiler for tracing support.' + \
722 termcap.Normal
723 main['PROTOC'] = False
724else:
725 # Based on the availability of the compress stream wrappers,
726 # require 2.1.0
727 min_protoc_version = '2.1.0'
728 if compareVersions(protoc_version[1], min_protoc_version) < 0:
729 print termcap.Yellow + termcap.Bold + \
730 'Warning: protoc version', min_protoc_version, \
731 'or newer required.\n' + \
732 ' Installed version:', protoc_version[1], \
733 termcap.Normal
734 main['PROTOC'] = False
735 else:
736 # Attempt to determine the appropriate include path and
737         # library path using pkg-config, which means we also need to
738 # check for pkg-config. Note that it is possible to use
739 # protobuf without the involvement of pkg-config. Later on we
740         # do a library config check and at that point the test
741 # will fail if libprotobuf cannot be found.
742 if readCommand(['pkg-config', '--version'], exception=''):
743 try:
744 # Attempt to establish what linking flags to add for protobuf
745 # using pkg-config
746 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
747 except:
748 print termcap.Yellow + termcap.Bold + \
749 'Warning: pkg-config could not get protobuf flags.' + \
750 termcap.Normal
751
752# Check for SWIG
753if not main.has_key('SWIG'):
754 print 'Error: SWIG utility not found.'
755 print ' Please install (see http://www.swig.org) and retry.'
756 Exit(1)
757
758# Check for appropriate SWIG version
759swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
760# First 3 words should be "SWIG Version x.y.z"
761if len(swig_version) < 3 or \
762 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
763 print 'Error determining SWIG version.'
764 Exit(1)
765
766min_swig_version = '2.0.4'
767if compareVersions(swig_version[2], min_swig_version) < 0:
768 print 'Error: SWIG version', min_swig_version, 'or newer required.'
769 print ' Installed version:', swig_version[2]
770 Exit(1)
771
772# Check for known incompatibilities. The standard library shipped with
773# gcc >= 4.9 does not play well with swig versions prior to 3.0
774if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
775 compareVersions(swig_version[2], '3.0') < 0:
776 print termcap.Yellow + termcap.Bold + \
777           'Warning: This combination of gcc and swig has' + \
778 ' known incompatibilities.\n' + \
779 ' If you encounter build problems, please update ' + \
780 'swig to 3.0 or later.' + \
781 termcap.Normal
782
783# Set up SWIG flags & scanner
784swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
785main.Append(SWIGFLAGS=swig_flags)
786
787# Check for 'timeout' from GNU coreutils. If present, regressions will
788# be run with a time limit. We require version 8.13 since we rely on
789# support for the '--foreground' option.
790timeout_lines = readCommand(['timeout', '--version'],
791 exception='').splitlines()
792# Get the first line and tokenize it
793timeout_version = timeout_lines[0].split() if timeout_lines else []
794main['TIMEOUT'] = timeout_version and \
795 compareVersions(timeout_version[-1], '8.13') >= 0
796
797# filter out all existing swig scanners, they mess up the dependency
798# stuff for some reason
799scanners = []
800for scanner in main['SCANNERS']:
801 skeys = scanner.skeys
802 if skeys == '.i':
803 continue
804
805 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
806 continue
807
808 scanners.append(scanner)
809
810# add the new swig scanner that we like better
811from SCons.Scanner import ClassicCPP as CPPScanner
812swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
813scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
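# The regular expression above makes the scanner pick up SWIG dependencies
# written as, for example (illustrative):
#   %include "python/swig/pyobject.i"
#   %import <std_string.i>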
814
815# replace the scanners list that has what we want
816main['SCANNERS'] = scanners
817
818# Add a custom Check function to the Configure context so that we can
819# figure out if the compiler adds leading underscores to global
820# variables. This is needed for the autogenerated asm files that we
821# use for embedding the python code.
822def CheckLeading(context):
823 context.Message("Checking for leading underscore in global variables...")
824 # 1) Define a global variable called x from asm so the C compiler
825 # won't change the symbol at all.
826 # 2) Declare that variable.
827 # 3) Use the variable
828 #
829 # If the compiler prepends an underscore, this will successfully
830 # link because the external symbol 'x' will be called '_x' which
831 # was defined by the asm statement. If the compiler does not
832 # prepend an underscore, this will not successfully link because
833 # '_x' will have been defined by assembly, while the C portion of
834 # the code will be trying to use 'x'
835 ret = context.TryLink('''
836 asm(".globl _x; _x: .byte 0");
837 extern int x;
838 int main() { return x; }
839 ''', extension=".c")
840 context.env.Append(LEADING_UNDERSCORE=ret)
841 context.Result(ret)
842 return ret
843
844# Add a custom Check function to test for structure members.
845def CheckMember(context, include, decl, member, include_quotes="<>"):
846 context.Message("Checking for member %s in %s..." %
847 (member, decl))
848 text = """
849#include %(header)s
850int main(){
851 %(decl)s test;
852 (void)test.%(member)s;
853 return 0;
854};
855""" % { "header" : include_quotes[0] + include + include_quotes[1],
856 "decl" : decl,
857 "member" : member,
858 }
859
860 ret = context.TryCompile(text, extension=".cc")
861 context.Result(ret)
862 return ret
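# For example (illustrative), a hypothetical check for the st_rdev member of
# struct stat could be written as:
#   conf.CheckMember('sys/stat.h', 'struct stat', 'st_rdev')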
863
864# Platform-specific configuration. Note again that we assume that all
865# builds under a given build root run on the same host platform.
866conf = Configure(main,
867 conf_dir = joinpath(build_root, '.scons_config'),
868 log_file = joinpath(build_root, 'scons_config.log'),
869 custom_tests = {
870 'CheckLeading' : CheckLeading,
871 'CheckMember' : CheckMember,
872 })
873
874# Check for leading underscores. Don't really need to worry either
875# way so don't need to check the return code.
876conf.CheckLeading()
877
878# Check if we should compile a 64 bit binary on Mac OS X/Darwin
879try:
880 import platform
881 uname = platform.uname()
882 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
883 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
884 main.Append(CCFLAGS=['-arch', 'x86_64'])
885 main.Append(CFLAGS=['-arch', 'x86_64'])
886 main.Append(LINKFLAGS=['-arch', 'x86_64'])
887 main.Append(ASFLAGS=['-arch', 'x86_64'])
888except:
889 pass
890
891# Recent versions of scons substitute a "Null" object for Configure()
892# when configuration isn't necessary, e.g., if the "--help" option is
893# present. Unfortunately this Null object always returns false,
894# breaking all our configuration checks. We replace it with our own
895# more optimistic null object that returns True instead.
896if not conf:
897 def NullCheck(*args, **kwargs):
898 return True
899
900 class NullConf:
901 def __init__(self, env):
902 self.env = env
903 def Finish(self):
904 return self.env
905 def __getattr__(self, mname):
906 return NullCheck
907
908 conf = NullConf(main)
909
910# Cache build files in the supplied directory.
911if main['M5_BUILD_CACHE']:
912 print 'Using build cache located at', main['M5_BUILD_CACHE']
913 CacheDir(main['M5_BUILD_CACHE'])
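# For example (illustrative):
#   scons M5_BUILD_CACHE=/path/to/cache build/ALPHA/gem5.opt
# stores built objects in that directory and reuses them in later builds.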
914
915if not GetOption('without_python'):
916 # Find Python include and library directories for embedding the
917 # interpreter. We rely on python-config to resolve the appropriate
918 # includes and linker flags. ParseConfig does not seem to understand
919 # the more exotic linker flags such as -Xlinker and -export-dynamic so
920 # we add them explicitly below. If you want to link in an alternate
921 # version of python, see above for instructions on how to invoke
922 # scons with the appropriate PATH set.
923 #
924 # First we check if python2-config exists, else we use python-config
925 python_config = readCommand(['which', 'python2-config'],
926 exception='').strip()
927 if not os.path.exists(python_config):
928 python_config = readCommand(['which', 'python-config'],
929 exception='').strip()
930 py_includes = readCommand([python_config, '--includes'],
931 exception='').split()
932 # Strip the -I from the include folders before adding them to the
933 # CPPPATH
934 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
935
936 # Read the linker flags and split them into libraries and other link
937     # flags. The libraries are added later through the call to CheckLib.
938 py_ld_flags = readCommand([python_config, '--ldflags'],
939 exception='').split()
940 py_libs = []
941 for lib in py_ld_flags:
942 if not lib.startswith('-l'):
943 main.Append(LINKFLAGS=[lib])
944 else:
945 lib = lib[2:]
946 if lib not in py_libs:
947 py_libs.append(lib)
948
949 # verify that this stuff works
950 if not conf.CheckHeader('Python.h', '<>'):
951 print "Error: can't find Python.h header in", py_includes
952 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
953 Exit(1)
954
955 for lib in py_libs:
956 if not conf.CheckLib(lib):
957 print "Error: can't find library %s required by python" % lib
958 Exit(1)
959
960# On Solaris you need to use libsocket for socket ops
961if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
962 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
963 print "Can't find library with socket calls (e.g. accept())"
964 Exit(1)
965
966# Check for zlib. If the check passes, libz will be automatically
967# added to the LIBS environment variable.
968if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
969 print 'Error: did not find needed zlib compression library '\
970 'and/or zlib.h header file.'
971 print ' Please install zlib and try again.'
972 Exit(1)
973
974# If we have the protobuf compiler, also make sure we have the
975# development libraries. If the check passes, libprotobuf will be
976# automatically added to the LIBS environment variable. After
977# this, we can use the HAVE_PROTOBUF flag to determine if we have
978# got both protoc and libprotobuf available.
979main['HAVE_PROTOBUF'] = main['PROTOC'] and \
980 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
981 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
982
983# If we have the compiler but not the library, print another warning.
984if main['PROTOC'] and not main['HAVE_PROTOBUF']:
985 print termcap.Yellow + termcap.Bold + \
986 'Warning: did not find protocol buffer library and/or headers.\n' + \
987 ' Please install libprotobuf-dev for tracing support.' + \
988 termcap.Normal
989
990# Check for librt.
991have_posix_clock = \
992 conf.CheckLibWithHeader(None, 'time.h', 'C',
993 'clock_nanosleep(0,0,NULL,NULL);') or \
994 conf.CheckLibWithHeader('rt', 'time.h', 'C',
995 'clock_nanosleep(0,0,NULL,NULL);')
996
997have_posix_timers = \
998 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
999 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1000
1001if not GetOption('without_tcmalloc'):
1002 if conf.CheckLib('tcmalloc'):
1003 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1004 elif conf.CheckLib('tcmalloc_minimal'):
1005 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1006 else:
1007 print termcap.Yellow + termcap.Bold + \
1008 "You can get a 12% performance improvement by "\
1009 "installing tcmalloc (libgoogle-perftools-dev package "\
1010 "on Ubuntu or RedHat)." + termcap.Normal
1011
1012if not have_posix_clock:
1013 print "Can't find library for POSIX clocks."
1014
1015# Check for <fenv.h> (C99 FP environment control)
1016have_fenv = conf.CheckHeader('fenv.h', '<>')
1017if not have_fenv:
1018 print "Warning: Header file <fenv.h> not found."
1019 print " This host has no IEEE FP rounding mode control."
1020
1021# Check if we should enable KVM-based hardware virtualization. The API
1022# we rely on has existed since version 2.6.36 of the kernel, but somehow
1023# the KVM_API_VERSION does not reflect the change. We test for one of
1024# the types as a fall back.
1025have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1026if not have_kvm:
1027 print "Info: Compatible header file <linux/kvm.h> not found, " \
1028 "disabling KVM support."
1029
1030# x86 needs support for xsave. We test for the structure here since we
1031# won't be able to run new tests by the time we know which ISA we're
1032# targeting.
1033have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1034 '#include <linux/kvm.h>') != 0
1035
1036# Check if the requested target ISA is compatible with the host
1037def is_isa_kvm_compatible(isa):
1038 try:
1039 import platform
1040 host_isa = platform.machine()
1041 except:
1042 print "Warning: Failed to determine host ISA."
1043 return False
1044
1045 if not have_posix_timers:
1046 print "Warning: Can not enable KVM, host seems to lack support " \
1047 "for POSIX timers"
1048 return False
1049
1050 if isa == "arm":
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision, the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90 print """
91For more details, see:
92 http://gem5.org/Dependencies
93"""
94 raise
95
96# We ensure the python version early because because python-config
97# requires python 2.5
98try:
99 EnsurePythonVersion(2, 5)
100except SystemExit, e:
101 print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109 raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import subprocess
116import sys
117
118from os import mkdir, environ
119from os.path import abspath, basename, dirname, expanduser, normpath
120from os.path import exists, isdir, isfile
121from os.path import join as joinpath, split as splitpath
122
123# SCons includes
124import SCons
125import SCons.Node
126
127extra_python_paths = [
128 Dir('src/python').srcnode().abspath, # gem5 includes
129 Dir('ext/ply').srcnode().abspath, # ply is used by several files
130 ]
131
132sys.path[1:1] = extra_python_paths
133
134from m5.util import compareVersions, readCommand
135from m5.util.terminal import get_termcap
136
137help_texts = {
138 "options" : "",
139 "global_vars" : "",
140 "local_vars" : ""
141}
142
143Export("help_texts")
144
145
146# There's a bug in scons in that (1) by default, the help texts from
147# AddOption() are supposed to be displayed when you type 'scons -h'
148# and (2) you can override the help displayed by 'scons -h' using the
149# Help() function, but these two features are incompatible: once
150# you've overridden the help text using Help(), there's no way to get
151# at the help texts from AddOptions. See:
152# http://scons.tigris.org/issues/show_bug.cgi?id=2356
153# http://scons.tigris.org/issues/show_bug.cgi?id=2611
154# This hack lets us extract the help text from AddOptions and
155# re-inject it via Help(). Ideally someday this bug will be fixed and
156# we can just use AddOption directly.
157def AddLocalOption(*args, **kwargs):
158 col_width = 30
159
160 help = " " + ", ".join(args)
161 if "help" in kwargs:
162 length = len(help)
163 if length >= col_width:
164 help += "\n" + " " * col_width
165 else:
166 help += " " * (col_width - length)
167 help += kwargs["help"]
168 help_texts["options"] += help + "\n"
169
170 AddOption(*args, **kwargs)
171
172AddLocalOption('--colors', dest='use_colors', action='store_true',
173 help="Add color to abbreviated scons output")
174AddLocalOption('--no-colors', dest='use_colors', action='store_false',
175 help="Don't add color to abbreviated scons output")
176AddLocalOption('--with-cxx-config', dest='with_cxx_config',
177 action='store_true',
178 help="Build with support for C++-based configuration")
179AddLocalOption('--default', dest='default', type='string', action='store',
180 help='Override which build_opts file to use for defaults')
181AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
182 help='Disable style checking hooks')
183AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184 help='Disable Link-Time Optimization for fast')
185AddLocalOption('--update-ref', dest='update_ref', action='store_true',
186 help='Update test reference outputs')
187AddLocalOption('--verbose', dest='verbose', action='store_true',
188 help='Print full tool command lines')
189AddLocalOption('--without-python', dest='without_python',
190 action='store_true',
191 help='Build without Python configuration support')
192AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
193 action='store_true',
194 help='Disable linking against tcmalloc')
195AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
196 help='Build with Undefined Behavior Sanitizer if available')
197
198termcap = get_termcap(GetOption('use_colors'))
199
200########################################################################
201#
202# Set up the main build environment.
203#
204########################################################################
205
206# export TERM so that clang reports errors in color
207use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
208 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
209 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
210
211use_prefixes = [
212 "M5", # M5 configuration (e.g., path to kernels)
213 "DISTCC_", # distcc (distributed compiler wrapper) configuration
214 "CCACHE_", # ccache (caching compiler wrapper) configuration
215 "CCC_", # clang static analyzer configuration
216 ]
217
218use_env = {}
219for key,val in sorted(os.environ.iteritems()):
220 if key in use_vars or \
221 any([key.startswith(prefix) for prefix in use_prefixes]):
222 use_env[key] = val
223
224# Tell scons to avoid implicit command dependencies to avoid issues
225# with the param wrappes being compiled twice (see
226# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
227main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
228main.Decider('MD5-timestamp')
229main.root = Dir(".") # The current directory (where this file lives).
230main.srcdir = Dir("src") # The source directory
231
232main_dict_keys = main.Dictionary().keys()
233
234# Check that we have a C/C++ compiler
235if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
236 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
237 Exit(1)
238
239# Check that swig is present
240if not 'SWIG' in main_dict_keys:
241 print "swig is not installed (package swig on Ubuntu and RedHat)"
242 Exit(1)
243
244# add useful python code PYTHONPATH so it can be used by subprocesses
245# as well
246main.AppendENVPath('PYTHONPATH', extra_python_paths)
247
248########################################################################
249#
250# Mercurial Stuff.
251#
252# If the gem5 directory is a mercurial repository, we should do some
253# extra things.
254#
255########################################################################
256
257hgdir = main.root.Dir(".hg")
258
259mercurial_style_message = """
260You're missing the gem5 style hook, which automatically checks your code
261against the gem5 style rules on hg commit and qrefresh commands. This
262script will now install the hook in your .hg/hgrc file.
263Press enter to continue, or ctrl-c to abort: """
264
265mercurial_style_hook = """
266# The following lines were automatically added by gem5/SConstruct
267# to provide the gem5 style-checking hooks
268[extensions]
269style = %s/util/style.py
270
271[hooks]
272pretxncommit.style = python:style.check_style
273pre-qrefresh.style = python:style.check_style
274# End of SConstruct additions
275
276""" % (main.root.abspath)
277
278mercurial_lib_not_found = """
279Mercurial libraries cannot be found, ignoring style hook. If
280you are a gem5 developer, please fix this and run the style
281hook. It is important.
282"""
283
284# Check for style hook and prompt for installation if it's not there.
285# Skip this if --ignore-style was specified, there's no .hg dir to
286# install a hook in, or there's no interactive terminal to prompt.
287if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
288 style_hook = True
289 try:
290 from mercurial import ui
291 ui = ui.ui()
292 ui.readconfig(hgdir.File('hgrc').abspath)
293 style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
294 ui.config('hooks', 'pre-qrefresh.style', None)
295 except ImportError:
296 print mercurial_lib_not_found
297
298 if not style_hook:
299 print mercurial_style_message,
300 # continue unless user does ctrl-c/ctrl-d etc.
301 try:
302 raw_input()
303 except:
304 print "Input exception, exiting scons.\n"
305 sys.exit(1)
306 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
307 print "Adding style hook to", hgrc_path, "\n"
308 try:
309 hgrc = open(hgrc_path, 'a')
310 hgrc.write(mercurial_style_hook)
311 hgrc.close()
312 except:
313 print "Error updating", hgrc_path
314 sys.exit(1)
315
316
317###################################################
318#
319# Figure out which configurations to set up based on the path(s) of
320# the target(s).
321#
322###################################################
323
324# Find default configuration & binary.
325Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
326
327# helper function: find last occurrence of element in list
328def rfind(l, elt, offs = -1):
329 for i in range(len(l)+offs, 0, -1):
330 if l[i] == elt:
331 return i
332 raise ValueError, "element not found"
333
334# Take a list of paths (or SCons Nodes) and return a list with all
335# paths made absolute and ~-expanded. Paths will be interpreted
336# relative to the launch directory unless a different root is provided
337def makePathListAbsolute(path_list, root=GetLaunchDir()):
338 return [abspath(joinpath(root, expanduser(str(p))))
339 for p in path_list]
340
341# Each target must have 'build' in the interior of the path; the
342# directory below this will determine the build parameters. For
343# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
344# recognize that ALPHA_SE specifies the configuration because it
345# follow 'build' in the build path.
346
347# The funky assignment to "[:]" is needed to replace the list contents
348# in place rather than reassign the symbol to a new list, which
349# doesn't work (obviously!).
350BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
351
352# Generate a list of the unique build roots and configs that the
353# collected targets reference.
354variant_paths = []
355build_root = None
356for t in BUILD_TARGETS:
357 path_dirs = t.split('/')
358 try:
359 build_top = rfind(path_dirs, 'build', -2)
360 except:
361 print "Error: no non-leaf 'build' dir found on target path", t
362 Exit(1)
363 this_build_root = joinpath('/',*path_dirs[:build_top+1])
364 if not build_root:
365 build_root = this_build_root
366 else:
367 if this_build_root != build_root:
368 print "Error: build targets not under same build root\n"\
369 " %s\n %s" % (build_root, this_build_root)
370 Exit(1)
371 variant_path = joinpath('/',*path_dirs[:build_top+2])
372 if variant_path not in variant_paths:
373 variant_paths.append(variant_path)
374
375# Make sure build_root exists (might not if this is the first build there)
376if not isdir(build_root):
377 mkdir(build_root)
378main['BUILDROOT'] = build_root
379
380Export('main')
381
382main.SConsignFile(joinpath(build_root, "sconsign"))
383
384# Default duplicate option is to use hard links, but this messes up
385# when you use emacs to edit a file in the target dir, as emacs moves
386# file to file~ then copies to file, breaking the link. Symbolic
387# (soft) links work better.
388main.SetOption('duplicate', 'soft-copy')
389
390#
391# Set up global sticky variables... these are common to an entire build
392# tree (not specific to a particular build like ALPHA_SE)
393#
394
395global_vars_file = joinpath(build_root, 'variables.global')
396
397global_vars = Variables(global_vars_file, args=ARGUMENTS)
398
399global_vars.AddVariables(
400 ('CC', 'C compiler', environ.get('CC', main['CC'])),
401 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
402 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
403 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
404 ('BATCH', 'Use batch pool for build and tests', False),
405 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
406 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
407 ('EXTRAS', 'Add extra directories to the compilation', '')
408 )
409
410# Update main environment with values from ARGUMENTS & global_vars_file
411global_vars.Update(main)
412help_texts["global_vars"] += global_vars.GenerateHelpText(main)
413
414# Save sticky variable settings back to current variables file
415global_vars.Save(global_vars_file, main)
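# Global sticky variables can be set on the scons command line and are then
# remembered in variables.global, e.g. (illustrative values):
#   scons CC=gcc-4.8 CXX=g++-4.8 build/ALPHA/gem5.opt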
416
417# Parse the EXTRAS variable to build a list of all the directories where
418# we'll look for sources etc. This list is exported as extras_dir_list.
419base_dir = main.srcdir.abspath
420if main['EXTRAS']:
421 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
422else:
423 extras_dir_list = []
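# EXTRAS is a colon-separated list of additional source directories, e.g.
# (hypothetical paths):
#   scons EXTRAS=/path/to/extra_src1:/path/to/extra_src2 build/ALPHA/gem5.opt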
424
425Export('base_dir')
426Export('extras_dir_list')
427
428# the ext directory should be on the #includes path
429main.Append(CPPPATH=[Dir('ext')])
430
431def strip_build_path(path, env):
432 path = str(path)
433 variant_base = env['BUILDROOT'] + os.path.sep
434 if path.startswith(variant_base):
435 path = path[len(variant_base):]
436 elif path.startswith('build/'):
437 path = path[6:]
438 return path
439
440# Generate a string of the form:
441# common/path/prefix/src1, src2 -> tgt1, tgt2
442# to print while building.
443class Transform(object):
444 # all specific color settings should be here and nowhere else
445 tool_color = termcap.Normal
446 pfx_color = termcap.Yellow
447 srcs_color = termcap.Yellow + termcap.Bold
448 arrow_color = termcap.Blue + termcap.Bold
449 tgts_color = termcap.Yellow + termcap.Bold
450
451 def __init__(self, tool, max_sources=99):
452 self.format = self.tool_color + (" [%8s] " % tool) \
453 + self.pfx_color + "%s" \
454 + self.srcs_color + "%s" \
455 + self.arrow_color + " -> " \
456 + self.tgts_color + "%s" \
457 + termcap.Normal
458 self.max_sources = max_sources
459
460 def __call__(self, target, source, env, for_signature=None):
461 # truncate source list according to max_sources param
462 source = source[0:self.max_sources]
463 def strip(f):
464 return strip_build_path(str(f), env)
465 if len(source) > 0:
466 srcs = map(strip, source)
467 else:
468 srcs = ['']
469 tgts = map(strip, target)
470 # surprisingly, os.path.commonprefix is a dumb char-by-char string
471 # operation that has nothing to do with paths.
472 com_pfx = os.path.commonprefix(srcs + tgts)
473 com_pfx_len = len(com_pfx)
474 if com_pfx:
475 # do some cleanup and sanity checking on common prefix
476 if com_pfx[-1] == ".":
477 # prefix matches all but file extension: ok
478 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
479 com_pfx = com_pfx[0:-1]
480 elif com_pfx[-1] == "/":
481 # common prefix is directory path: OK
482 pass
483 else:
484 src0_len = len(srcs[0])
485 tgt0_len = len(tgts[0])
486 if src0_len == com_pfx_len:
487 # source is a substring of target, OK
488 pass
489 elif tgt0_len == com_pfx_len:
490 # target is a substring of source, need to back up to
491 # avoid empty string on RHS of arrow
492 sep_idx = com_pfx.rfind(".")
493 if sep_idx != -1:
494 com_pfx = com_pfx[0:sep_idx]
495 else:
496 com_pfx = ''
497 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
498 # still splitting at file extension: ok
499 pass
500 else:
501 # probably a fluke; ignore it
502 com_pfx = ''
503 # recalculate length in case com_pfx was modified
504 com_pfx_len = len(com_pfx)
505 def fmt(files):
506 f = map(lambda s: s[com_pfx_len:], files)
507 return ', '.join(f)
508 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
509
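# As a rough illustration, Transform("CC") renders a compile step along the
# lines of (file names hypothetical):
#   [      CC] base/cprintf.cc -> .o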
510Export('Transform')
511
512# enable the regression script to use the termcap
513main['TERMCAP'] = termcap
514
515if GetOption('verbose'):
516 def MakeAction(action, string, *args, **kwargs):
517 return Action(action, *args, **kwargs)
518else:
519 MakeAction = Action
520 main['CCCOMSTR'] = Transform("CC")
521 main['CXXCOMSTR'] = Transform("CXX")
522 main['ASCOMSTR'] = Transform("AS")
523 main['SWIGCOMSTR'] = Transform("SWIG")
524 main['ARCOMSTR'] = Transform("AR", 0)
525 main['LINKCOMSTR'] = Transform("LINK", 0)
526 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
527 main['M4COMSTR'] = Transform("M4")
528 main['SHCCCOMSTR'] = Transform("SHCC")
529 main['SHCXXCOMSTR'] = Transform("SHCXX")
530Export('MakeAction')
531
532# Initialize the Link-Time Optimization (LTO) flags
533main['LTO_CCFLAGS'] = []
534main['LTO_LDFLAGS'] = []
535
536# According to the readme, tcmalloc works best if the compiler doesn't
537# assume that we're using the builtin malloc and friends. These flags
538# are compiler-specific, so we need to set them after we detect which
539# compiler we're using.
540main['TCMALLOC_CCFLAGS'] = []
541
542CXX_version = readCommand([main['CXX'],'--version'], exception=False)
543CXX_V = readCommand([main['CXX'],'-V'], exception=False)
544
545main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
546main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
547if main['GCC'] + main['CLANG'] > 1:
548 print 'Error: How can we have two at the same time?'
549 Exit(1)
550
551# Set up default C++ compiler flags
552if main['GCC'] or main['CLANG']:
553 # As gcc and clang share many flags, do the common parts here
554 main.Append(CCFLAGS=['-pipe'])
555 main.Append(CCFLAGS=['-fno-strict-aliasing'])
556 # Enable -Wall and then disable the few warnings that we
557 # consistently violate
558 main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
559 # We always compile using C++11, but only gcc >= 4.7 and clang >= 3.1
560 # accept that flag name, so we stick with c++0x
561 main.Append(CXXFLAGS=['-std=c++0x'])
562 # Add selected sanity checks from -Wextra
563 main.Append(CXXFLAGS=['-Wmissing-field-initializers',
564 '-Woverloaded-virtual'])
565else:
566 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
567 print "Don't know what compiler options to use for your compiler."
568 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
569 print termcap.Yellow + ' version:' + termcap.Normal,
570 if not CXX_version:
571 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
572 termcap.Normal
573 else:
574 print CXX_version.replace('\n', '<nl>')
575 print " If you're trying to use a compiler other than GCC"
576 print " or clang, there appears to be something wrong with your"
577 print " environment."
578 print " "
579 print " If you are trying to use a compiler other than those listed"
580 print " above you will need to fix SConstruct and "
581 print " src/SConscript to support that compiler."
582 Exit(1)
583
584if main['GCC']:
585 # Check for a supported version of gcc. >= 4.6 is chosen for its
586 # level of c++11 support. See
587 # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
588 # the first version with proper LTO support.
589 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
590 if compareVersions(gcc_version, "4.6") < 0:
591 print 'Error: gcc version 4.6 or newer required.'
592 print ' Installed version:', gcc_version
593 Exit(1)
594
595 main['GCC_VERSION'] = gcc_version
596
597 # gcc from version 4.8 and above generates "rep; ret" instructions
598 # to avoid performance penalties on certain AMD chips. Older
599 # assemblers detect this as an error, "Error: expecting string
600 # instruction after `rep'"
601 if compareVersions(gcc_version, "4.8") > 0:
602 as_version = readCommand([main['AS'], '-v', '/dev/null'],
603 exception=False).split()
604
605 if not as_version or compareVersions(as_version[-1], "2.23") < 0:
606 print termcap.Yellow + termcap.Bold + \
607 'Warning: This combination of gcc and binutils has' + \
608 ' known incompatibilities.\n' + \
609 ' If you encounter build problems, please update ' + \
610 'binutils to 2.23.' + \
611 termcap.Normal
612
613 # Make sure we warn if the user has requested to compile with the
614 # Undefined Behavior Sanitizer and this version of gcc does not
615 # support it.
616 if GetOption('with_ubsan') and \
617 compareVersions(gcc_version, '4.9') < 0:
618 print termcap.Yellow + termcap.Bold + \
619 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
620 termcap.Normal
621
622 # Add the appropriate Link-Time Optimization (LTO) flags
623 # unless LTO is explicitly turned off. Note that these flags
624 # are only used by the fast target.
625 if not GetOption('no_lto'):
626 # Pass the LTO flag when compiling to produce GIMPLE
627 # output; we merely create the flags here and only append
628 # them later.
629 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
630
631 # Use the same amount of jobs for LTO as we are running
632 # scons with
633 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
634
635 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
636 '-fno-builtin-realloc', '-fno-builtin-free'])
637
638elif main['CLANG']:
639 # Check for a supported version of clang, >= 3.0 is needed to
640 # support similar features as gcc 4.6. See
641 # http://clang.llvm.org/cxx_status.html for details
642 clang_version_re = re.compile(".* version (\d+\.\d+)")
643 clang_version_match = clang_version_re.search(CXX_version)
644 if (clang_version_match):
645 clang_version = clang_version_match.groups()[0]
646 if compareVersions(clang_version, "3.0") < 0:
647 print 'Error: clang version 3.0 or newer required.'
648 print ' Installed version:', clang_version
649 Exit(1)
650 else:
651 print 'Error: Unable to determine clang version.'
652 Exit(1)
653
654 # clang has a few additional warnings that we disable: tautological
655 # comparisons are allowed because unsigned integers get compared
656 # to constants that happen to be 0, extraneous parentheses are
657 # allowed because of Ruby's printing of the AST, and, finally,
658 # self-assignments are allowed because the generated CPU code
659 # relies on them.
660 main.Append(CCFLAGS=['-Wno-tautological-compare',
661 '-Wno-parentheses',
662 '-Wno-self-assign',
663 # Some versions of libstdc++ (4.8?) seem to
664 # use struct hash and class hash
665 # interchangeably.
666 '-Wno-mismatched-tags',
667 ])
668
669 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
670
671 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
672 # opposed to libstdc++, as the latter is dated.
673 if sys.platform == "darwin":
674 main.Append(CXXFLAGS=['-stdlib=libc++'])
675 main.Append(LIBS=['c++'])
676
677else:
678 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
679 print "Don't know what compiler options to use for your compiler."
680 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
681 print termcap.Yellow + ' version:' + termcap.Normal,
682 if not CXX_version:
683 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
684 termcap.Normal
685 else:
686 print CXX_version.replace('\n', '<nl>')
687 print " If you're trying to use a compiler other than GCC"
688 print " or clang, there appears to be something wrong with your"
689 print " environment."
690 print " "
691 print " If you are trying to use a compiler other than those listed"
692 print " above you will need to fix SConstruct and "
693 print " src/SConscript to support that compiler."
694 Exit(1)
695
696# Set up common yacc/bison flags (needed for Ruby)
697main['YACCFLAGS'] = '-d'
698main['YACCHXXFILESUFFIX'] = '.hh'
699
700# Do this after we save the settings back, or else we'll tack on an
701# extra 'qdo' every time we run scons.
702if main['BATCH']:
703 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
704 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
705 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
706 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
707 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
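# With BATCH=True and the default BATCH_CMD, the tools are simply prefixed,
# e.g. CC becomes something like 'qdo gcc'.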
708
709if sys.platform == 'cygwin':
710 # cygwin has some header file issues...
711 main.Append(CCFLAGS=["-Wno-uninitialized"])
712
713# Check for the protobuf compiler
714protoc_version = readCommand([main['PROTOC'], '--version'],
715 exception='').split()
716
717# First two words should be "libprotoc x.y.z"
718if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
719 print termcap.Yellow + termcap.Bold + \
720 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
721 ' Please install protobuf-compiler for tracing support.' + \
722 termcap.Normal
723 main['PROTOC'] = False
724else:
725 # Based on the availability of the compress stream wrappers,
726 # require 2.1.0
727 min_protoc_version = '2.1.0'
728 if compareVersions(protoc_version[1], min_protoc_version) < 0:
729 print termcap.Yellow + termcap.Bold + \
730 'Warning: protoc version', min_protoc_version, \
731 'or newer required.\n' + \
732 ' Installed version:', protoc_version[1], \
733 termcap.Normal
734 main['PROTOC'] = False
735 else:
736 # Attempt to determine the appropriate include and library
737 # paths using pkg-config, which means we also need to check
738 # for pkg-config itself. Note that it is possible to use
739 # protobuf without involving pkg-config. Later on we do a
740 # library config check, and at that point the test will
741 # fail if libprotobuf cannot be found.
742 if readCommand(['pkg-config', '--version'], exception=''):
743 try:
744 # Attempt to establish what linking flags to add for protobuf
745 # using pkg-config
746 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
747 except:
748 print termcap.Yellow + termcap.Bold + \
749 'Warning: pkg-config could not get protobuf flags.' + \
750 termcap.Normal
751
752# Check for SWIG
753if not main.has_key('SWIG'):
754 print 'Error: SWIG utility not found.'
755 print ' Please install (see http://www.swig.org) and retry.'
756 Exit(1)
757
758# Check for appropriate SWIG version
759swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
760# First 3 words should be "SWIG Version x.y.z"
761if len(swig_version) < 3 or \
762 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
763 print 'Error determining SWIG version.'
764 Exit(1)
765
766min_swig_version = '2.0.4'
767if compareVersions(swig_version[2], min_swig_version) < 0:
768 print 'Error: SWIG version', min_swig_version, 'or newer required.'
769 print ' Installed version:', swig_version[2]
770 Exit(1)
771
772# Check for known incompatibilities. The standard library shipped with
773# gcc >= 4.9 does not play well with swig versions prior to 3.0
774if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
775 compareVersions(swig_version[2], '3.0') < 0:
776 print termcap.Yellow + termcap.Bold + \
777 'Warning: This combination of gcc and swig has' + \
778 ' known incompatibilities.\n' + \
779 ' If you encounter build problems, please update ' + \
780 'swig to 3.0 or later.' + \
781 termcap.Normal
782
783# Set up SWIG flags & scanner
784swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
785main.Append(SWIGFLAGS=swig_flags)
786
787# Check for 'timeout' from GNU coreutils. If present, regressions will
788# be run with a time limit. We require version 8.13 since we rely on
789# support for the '--foreground' option.
790timeout_lines = readCommand(['timeout', '--version'],
791 exception='').splitlines()
792# Get the first line and tokenize it
793timeout_version = timeout_lines[0].split() if timeout_lines else []
794main['TIMEOUT'] = timeout_version and \
795 compareVersions(timeout_version[-1], '8.13') >= 0
796
797# filter out all existing swig scanners, they mess up the dependency
798# stuff for some reason
799scanners = []
800for scanner in main['SCANNERS']:
801 skeys = scanner.skeys
802 if skeys == '.i':
803 continue
804
805 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
806 continue
807
808 scanners.append(scanner)
809
810# add the new swig scanner that we like better
811from SCons.Scanner import ClassicCPP as CPPScanner
812swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
813scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
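# For illustration, lines in a .i file that the regular expression above
# treats as dependencies include:
#   %include "module.i"
#   #include <other.i>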
814
815# replace the scanners list that has what we want
816main['SCANNERS'] = scanners
817
818# Add a custom Check function to the Configure context so that we can
819# figure out if the compiler adds leading underscores to global
820# variables. This is needed for the autogenerated asm files that we
821# use for embedding the python code.
822def CheckLeading(context):
823 context.Message("Checking for leading underscore in global variables...")
824 # 1) Define a global variable called x from asm so the C compiler
825 # won't change the symbol at all.
826 # 2) Declare that variable.
827 # 3) Use the variable
828 #
829 # If the compiler prepends an underscore, this will successfully
830 # link because the external symbol 'x' will be called '_x' which
831 # was defined by the asm statement. If the compiler does not
832 # prepend an underscore, this will not successfully link because
833 # '_x' will have been defined by assembly, while the C portion of
834 # the code will be trying to use 'x'
835 ret = context.TryLink('''
836 asm(".globl _x; _x: .byte 0");
837 extern int x;
838 int main() { return x; }
839 ''', extension=".c")
840 context.env.Append(LEADING_UNDERSCORE=ret)
841 context.Result(ret)
842 return ret
843
844# Add a custom Check function to test for structure members.
845def CheckMember(context, include, decl, member, include_quotes="<>"):
846 context.Message("Checking for member %s in %s..." %
847 (member, decl))
848 text = """
849#include %(header)s
850int main(){
851 %(decl)s test;
852 (void)test.%(member)s;
853 return 0;
854};
855""" % { "header" : include_quotes[0] + include + include_quotes[1],
856 "decl" : decl,
857 "member" : member,
858 }
859
860 ret = context.TryCompile(text, extension=".cc")
861 context.Result(ret)
862 return ret
863
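# As a sketch of what CheckMember generates: for example,
# CheckMember('linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
# tries to compile roughly the following, and the check succeeds only if it
# compiles:
#   #include <linux/perf_event.h>
#   int main(){
#       struct perf_event_attr test;
#       (void)test.exclude_host;
#       return 0;
#   };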
864# Platform-specific configuration. Note again that we assume that all
865# builds under a given build root run on the same host platform.
866conf = Configure(main,
867 conf_dir = joinpath(build_root, '.scons_config'),
868 log_file = joinpath(build_root, 'scons_config.log'),
869 custom_tests = {
870 'CheckLeading' : CheckLeading,
871 'CheckMember' : CheckMember,
872 })
873
874# Check for leading underscores. Don't really need to worry either
875# way so don't need to check the return code.
876conf.CheckLeading()
877
878# Check if we should compile a 64 bit binary on Mac OS X/Darwin
879try:
880 import platform
881 uname = platform.uname()
882 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
883 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
884 main.Append(CCFLAGS=['-arch', 'x86_64'])
885 main.Append(CFLAGS=['-arch', 'x86_64'])
886 main.Append(LINKFLAGS=['-arch', 'x86_64'])
887 main.Append(ASFLAGS=['-arch', 'x86_64'])
888except:
889 pass
890
891# Recent versions of scons substitute a "Null" object for Configure()
892# when configuration isn't necessary, e.g., if the "--help" option is
893# present. Unfortunately this Null object always returns false,
894# breaking all our configuration checks. We replace it with our own
895# more optimistic null object that returns True instead.
896if not conf:
897 def NullCheck(*args, **kwargs):
898 return True
899
900 class NullConf:
901 def __init__(self, env):
902 self.env = env
903 def Finish(self):
904 return self.env
905 def __getattr__(self, mname):
906 return NullCheck
907
908 conf = NullConf(main)
909
910# Cache build files in the supplied directory.
911if main['M5_BUILD_CACHE']:
912 print 'Using build cache located at', main['M5_BUILD_CACHE']
913 CacheDir(main['M5_BUILD_CACHE'])
914
915if not GetOption('without_python'):
916 # Find Python include and library directories for embedding the
917 # interpreter. We rely on python-config to resolve the appropriate
918 # includes and linker flags. ParseConfig does not seem to understand
919 # the more exotic linker flags such as -Xlinker and -export-dynamic so
920 # we add them explicitly below. If you want to link in an alternate
921 # version of python, see above for instructions on how to invoke
922 # scons with the appropriate PATH set.
923 #
924 # First we check if python2-config exists, else we use python-config
925 python_config = readCommand(['which', 'python2-config'],
926 exception='').strip()
927 if not os.path.exists(python_config):
928 python_config = readCommand(['which', 'python-config'],
929 exception='').strip()
930 py_includes = readCommand([python_config, '--includes'],
931 exception='').split()
932 # Strip the -I from the include folders before adding them to the
933 # CPPPATH
934 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
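# As an illustration, python-config --includes typically prints flags
# such as '-I/usr/include/python2.7'; the slice above strips the leading
# '-I' so only the bare directory is appended to CPPPATH.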
935
936 # Read the linker flags and split them into libraries and other link
937 # flags. The libraries are added later through the call to CheckLib.
938 py_ld_flags = readCommand([python_config, '--ldflags'],
939 exception='').split()
940 py_libs = []
941 for lib in py_ld_flags:
942 if not lib.startswith('-l'):
943 main.Append(LINKFLAGS=[lib])
944 else:
945 lib = lib[2:]
946 if lib not in py_libs:
947 py_libs.append(lib)
948
949 # verify that this stuff works
950 if not conf.CheckHeader('Python.h', '<>'):
951 print "Error: can't find Python.h header in", py_includes
952 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
953 Exit(1)
954
955 for lib in py_libs:
956 if not conf.CheckLib(lib):
957 print "Error: can't find library %s required by python" % lib
958 Exit(1)
959
960# On Solaris you need to use libsocket for socket ops
961if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
962 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
963 print "Can't find library with socket calls (e.g. accept())"
964 Exit(1)
965
966# Check for zlib. If the check passes, libz will be automatically
967# added to the LIBS environment variable.
968if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
969 print 'Error: did not find needed zlib compression library '\
970 'and/or zlib.h header file.'
971 print ' Please install zlib and try again.'
972 Exit(1)
973
974# If we have the protobuf compiler, also make sure we have the
975# development libraries. If the check passes, libprotobuf will be
976# automatically added to the LIBS environment variable. After
977# this, we can use the HAVE_PROTOBUF flag to determine if we have
978# got both protoc and libprotobuf available.
979main['HAVE_PROTOBUF'] = main['PROTOC'] and \
980 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
981 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
982
983# If we have the compiler but not the library, print another warning.
984if main['PROTOC'] and not main['HAVE_PROTOBUF']:
985 print termcap.Yellow + termcap.Bold + \
986 'Warning: did not find protocol buffer library and/or headers.\n' + \
987 ' Please install libprotobuf-dev for tracing support.' + \
988 termcap.Normal
989
990# Check for librt.
991have_posix_clock = \
992 conf.CheckLibWithHeader(None, 'time.h', 'C',
993 'clock_nanosleep(0,0,NULL,NULL);') or \
994 conf.CheckLibWithHeader('rt', 'time.h', 'C',
995 'clock_nanosleep(0,0,NULL,NULL);')
996
997have_posix_timers = \
998 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
999 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1000
1001if not GetOption('without_tcmalloc'):
1002 if conf.CheckLib('tcmalloc'):
1003 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1004 elif conf.CheckLib('tcmalloc_minimal'):
1005 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1006 else:
1007 print termcap.Yellow + termcap.Bold + \
1008 "You can get a 12% performance improvement by "\
1009 "installing tcmalloc (libgoogle-perftools-dev package "\
1010 "on Ubuntu or RedHat)." + termcap.Normal
1011
1012if not have_posix_clock:
1013 print "Can't find library for POSIX clocks."
1014
1015# Check for <fenv.h> (C99 FP environment control)
1016have_fenv = conf.CheckHeader('fenv.h', '<>')
1017if not have_fenv:
1018 print "Warning: Header file <fenv.h> not found."
1019 print " This host has no IEEE FP rounding mode control."
1020
1021# Check if we should enable KVM-based hardware virtualization. The API
1022# we rely on has existed since version 2.6.36 of the kernel, but somehow
1023# the KVM_API_VERSION does not reflect the change. We test for one of
1024# the types as a fall back.
1025have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1026if not have_kvm:
1027 print "Info: Compatible header file <linux/kvm.h> not found, " \
1028 "disabling KVM support."
1029
1030# x86 needs support for xsave. We test for the structure here since we
1031# won't be able to run new tests by the time we know which ISA we're
1032# targeting.
1033have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1034 '#include <linux/kvm.h>') != 0
1035
1036# Check if the requested target ISA is compatible with the host
1037def is_isa_kvm_compatible(isa):
1038 try:
1039 import platform
1040 host_isa = platform.machine()
1041 except:
1042 print "Warning: Failed to determine host ISA."
1043 return False
1044
1045 if not have_posix_timers:
1046 print "Warning: Cannot enable KVM, host seems to lack support " \
1047 "for POSIX timers"
1048 return False
1049
1050 if isa == "arm":
1051 return host_isa == "armv7l"
1051 return host_isa in ( "armv7l", "aarch64" )
1052 elif isa == "x86":
1053 if host_isa != "x86_64":
1054 return False
1055
1056 if not have_kvm_xsave:
1057 print "KVM on x86 requires xsave support in kernel headers."
1058 return False
1059
1060 return True
1061 else:
1062 return False
1063
1064
1065# Check if the exclude_host attribute is available. We want this to
1066# get accurate instruction counts in KVM.
1067main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1068 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1069
1070
1071######################################################################
1072#
1073# Finish the configuration
1074#
1075main = conf.Finish()
1076
1077######################################################################
1078#
1079# Collect all non-global variables
1080#
1081
1082# Define the universe of supported ISAs
1083all_isa_list = [ ]
1084Export('all_isa_list')
1085
1086class CpuModel(object):
1087 '''The CpuModel class encapsulates everything the ISA parser needs to
1088 know about a particular CPU model.'''
1089
1090 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1091 dict = {}
1092
1093 # Constructor. Automatically adds models to CpuModel.dict.
1094 def __init__(self, name, default=False):
1095 self.name = name # name of model
1096
1097 # This cpu is enabled by default
1098 self.default = default
1099
1100 # Add self to dict
1101 if name in CpuModel.dict:
1102 raise AttributeError, "CpuModel '%s' already registered" % name
1103 CpuModel.dict[name] = self
1104
1105Export('CpuModel')
1106
1107# Sticky variables get saved in the variables file so they persist from
1108# one invocation to the next (unless overridden, in which case the new
1109# value becomes sticky).
1110sticky_vars = Variables(args=ARGUMENTS)
1111Export('sticky_vars')
1112
1113# Sticky variables that should be exported
1114export_vars = []
1115Export('export_vars')
1116
1117# For Ruby
1118all_protocols = []
1119Export('all_protocols')
1120protocol_dirs = []
1121Export('protocol_dirs')
1122slicc_includes = []
1123Export('slicc_includes')
1124
1125# Walk the tree and execute all SConsopts scripts that will add to the
1126# above variables
1127if GetOption('verbose'):
1128 print "Reading SConsopts"
1129for bdir in [ base_dir ] + extras_dir_list:
1130 if not isdir(bdir):
1131 print "Error: directory '%s' does not exist" % bdir
1132 Exit(1)
1133 for root, dirs, files in os.walk(bdir):
1134 if 'SConsopts' in files:
1135 if GetOption('verbose'):
1136 print "Reading", joinpath(root, 'SConsopts')
1137 SConscript(joinpath(root, 'SConsopts'))
1138
1139all_isa_list.sort()
1140
1141sticky_vars.AddVariables(
1142 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1143 ListVariable('CPU_MODELS', 'CPU models',
1144 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1145 sorted(CpuModel.dict.keys())),
1146 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1147 False),
1148 BoolVariable('SS_COMPATIBLE_FP',
1149 'Make floating-point results compatible with SimpleScalar',
1150 False),
1151 BoolVariable('USE_SSE2',
1152 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1153 False),
1154 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1155 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1156 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1157 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1158 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1159 all_protocols),
1160 )
1161
1162# These variables get exported to #defines in config/*.hh (see src/SConscript).
1163export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
1164 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL', 'HAVE_PROTOBUF',
1165 'HAVE_PERF_ATTR_EXCLUDE_HOST']
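# Per-build sticky variables are likewise set on the command line and then
# remembered under $BUILD_ROOT/variables/<CONFIG>, e.g. (illustrative):
#   scons USE_KVM=False PROTOCOL=MOESI_hammer build/X86/gem5.opt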
1166
1167###################################################
1168#
1169# Define a SCons builder for configuration flag headers.
1170#
1171###################################################
1172
1173# This function generates a config header file that #defines the
1174# variable symbol to the current variable setting (0 or 1). The source
1175# operands are the name of the variable and a Value node containing the
1176# value of the variable.
1177def build_config_file(target, source, env):
1178 (variable, value) = [s.get_contents() for s in source]
1179 f = file(str(target[0]), 'w')
1180 print >> f, '#define', variable, value
1181 f.close()
1182 return None
1183
1184# Combine the two functions into a scons Action object.
1185config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1186
1187# The emitter munges the source & target node lists to reflect what
1188# we're really doing.
1189def config_emitter(target, source, env):
1190 # extract variable name from Builder arg
1191 variable = str(target[0])
1192 # True target is config header file
1193 target = joinpath('config', variable.lower() + '.hh')
1194 val = env[variable]
1195 if isinstance(val, bool):
1196 # Force value to 0/1
1197 val = int(val)
1198 elif isinstance(val, str):
1199 val = '"' + val + '"'
1200
1201 # Sources are variable name & value (packaged in SCons Value nodes)
1202 return ([target], [Value(variable), Value(val)])
1203
1204config_builder = Builder(emitter = config_emitter, action = config_action)
1205
1206main.Append(BUILDERS = { 'ConfigFile' : config_builder })
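# As an illustration, invoking roughly env.ConfigFile('USE_KVM') (as
# src/SConscript does for each exported variable) with USE_KVM set to True
# produces config/use_kvm.hh in the variant directory containing one line:
#   #define USE_KVM 1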
1207
1208# libelf build is shared across all configs in the build root.
1209main.SConscript('ext/libelf/SConscript',
1210 variant_dir = joinpath(build_root, 'libelf'))
1211
1212# gzstream build is shared across all configs in the build root.
1213main.SConscript('ext/gzstream/SConscript',
1214 variant_dir = joinpath(build_root, 'gzstream'))
1215
1216# libfdt build is shared across all configs in the build root.
1217main.SConscript('ext/libfdt/SConscript',
1218 variant_dir = joinpath(build_root, 'libfdt'))
1219
1220# fputils build is shared across all configs in the build root.
1221main.SConscript('ext/fputils/SConscript',
1222 variant_dir = joinpath(build_root, 'fputils'))
1223
1224# DRAMSim2 build is shared across all configs in the build root.
1225main.SConscript('ext/dramsim2/SConscript',
1226 variant_dir = joinpath(build_root, 'dramsim2'))
1227
1228# DRAMPower build is shared across all configs in the build root.
1229main.SConscript('ext/drampower/SConscript',
1230 variant_dir = joinpath(build_root, 'drampower'))
1231
1232###################################################
1233#
1234# This function is used to set up a directory with switching headers
1235#
1236###################################################
1237
1238main['ALL_ISA_LIST'] = all_isa_list
1239all_isa_deps = {}
1240def make_switching_dir(dname, switch_headers, env):
1241 # Generate the header. target[0] is the full path of the output
1242 # header to generate. 'source' is a dummy variable, since we get the
1243 # list of ISAs from env['ALL_ISA_LIST'].
1244 def gen_switch_hdr(target, source, env):
1245 fname = str(target[0])
1246 isa = env['TARGET_ISA'].lower()
1247 try:
1248 f = open(fname, 'w')
1249 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1250 f.close()
1251 except IOError:
1252 print "Failed to create %s" % fname
1253 raise
1254
1255 # Build SCons Action object. 'varlist' specifies env vars that this
1256 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1257 # should get re-executed.
1258 switch_hdr_action = MakeAction(gen_switch_hdr,
1259 Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1260
1261 # Instantiate actions for each header
1262 for hdr in switch_headers:
1263 env.Command(hdr, [], switch_hdr_action)
1264
1265 isa_target = Dir('.').up().name.lower().replace('_', '-')
1266 env['PHONY_BASE'] = '#'+isa_target
1267 all_isa_deps[isa_target] = None
1268
1269Export('make_switching_dir')
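# For illustration: for a switching header such as arch/decoder.hh built with
# TARGET_ISA=alpha, gen_switch_hdr writes a one-line file along the lines of:
#   #include "arch/alpha/decoder.hh"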
1270
1271# all-isas -> all-deps -> all-environs -> all_targets
1272main.Alias('#all-isas', [])
1273main.Alias('#all-deps', '#all-isas')
1274
1275# Dummy target to ensure all environments are created before telling
1276# SCons what to actually make (the command line arguments). We attach
1277# them to the dependence graph after the environments are complete.
1278ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1279def environsComplete(target, source, env):
1280 for t in ORIG_BUILD_TARGETS:
1281 main.Depends('#all-targets', t)
1282
1283# Each build/* switching_dir attaches its *-environs target to #all-environs.
1284main.Append(BUILDERS = {'CompleteEnvirons' :
1285 Builder(action=MakeAction(environsComplete, None))})
1286main.CompleteEnvirons('#all-environs', [])
1287
1288def doNothing(**ignored): pass
1289main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1290
1291# The final target to which all the original targets ultimately get attached.
1292main.Dummy('#all-targets', '#all-environs')
1293BUILD_TARGETS[:] = ['#all-targets']
1294
1295###################################################
1296#
1297# Define build environments for selected configurations.
1298#
1299###################################################
1300
1301for variant_path in variant_paths:
1302 if not GetOption('silent'):
1303 print "Building in", variant_path
1304
1305 # Make a copy of the build-root environment to use for this config.
1306 env = main.Clone()
1307 env['BUILDDIR'] = variant_path
1308
1309 # variant_dir is the tail component of build path, and is used to
1310 # determine the build parameters (e.g., 'ALPHA_SE')
1311 (build_root, variant_dir) = splitpath(variant_path)
1312
1313 # Set env variables according to the build directory config.
1314 sticky_vars.files = []
1315 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1316 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1317 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1318 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1319 if isfile(current_vars_file):
1320 sticky_vars.files.append(current_vars_file)
1321 if not GetOption('silent'):
1322 print "Using saved variables file %s" % current_vars_file
1323 else:
1324 # Build dir-specific variables file doesn't exist.
1325
1326 # Make sure the directory is there so we can create it later
1327 opt_dir = dirname(current_vars_file)
1328 if not isdir(opt_dir):
1329 mkdir(opt_dir)
1330
1331 # Get default build variables from source tree. Variables are
1332 # normally determined by name of $VARIANT_DIR, but can be
1333 # overridden by '--default=' arg on command line.
1334 default = GetOption('default')
1335 opts_dir = joinpath(main.root.abspath, 'build_opts')
1336 if default:
1337 default_vars_files = [joinpath(build_root, 'variables', default),
1338 joinpath(opts_dir, default)]
1339 else:
1340 default_vars_files = [joinpath(opts_dir, variant_dir)]
1341 existing_files = filter(isfile, default_vars_files)
1342 if existing_files:
1343 default_vars_file = existing_files[0]
1344 sticky_vars.files.append(default_vars_file)
1345 print "Variables file %s not found,\n using defaults in %s" \
1346 % (current_vars_file, default_vars_file)
1347 else:
1348 print "Error: cannot find variables file %s or " \
1349 "default file(s) %s" \
1350 % (current_vars_file, ' or '.join(default_vars_files))
1351 Exit(1)
1352
1353 # Apply current variable settings to env
1354 sticky_vars.Update(env)
1355
1356 help_texts["local_vars"] += \
1357 "Build variables for %s:\n" % variant_dir \
1358 + sticky_vars.GenerateHelpText(env)
1359
1360 # Process variable settings.
1361
1362 if not have_fenv and env['USE_FENV']:
1363 print "Warning: <fenv.h> not available; " \
1364 "forcing USE_FENV to False in", variant_dir + "."
1365 env['USE_FENV'] = False
1366
1367 if not env['USE_FENV']:
1368 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1369 print " FP results may deviate slightly from other platforms."
1370
1371 if env['EFENCE']:
1372 env.Append(LIBS=['efence'])
1373
1374 if env['USE_KVM']:
1375 if not have_kvm:
1376 print "Warning: Cannot enable KVM, host seems to lack KVM support"
1377 env['USE_KVM'] = False
1378 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1379 print "Info: KVM support disabled due to unsupported host and " \
1380 "target ISA combination"
1381 env['USE_KVM'] = False
1382
1383 # Warn about missing optional functionality
1384 if env['USE_KVM']:
1385 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1386 print "Warning: perf_event headers lack support for the " \
1387 "exclude_host attribute. KVM instruction counts will " \
1388 "be inaccurate."
1389
1390 # Save sticky variable settings back to current variables file
1391 sticky_vars.Save(current_vars_file, env)
1392
1393 if env['USE_SSE2']:
1394 env.Append(CCFLAGS=['-msse2'])
1395
1396 # The src/SConscript file sets up the build rules in 'env' according
1397 # to the configured variables. It returns a list of environments,
1398 # one for each variant build (debug, opt, etc.)
1399 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1400
1401def pairwise(iterable):
1402 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1403 a, b = itertools.tee(iterable)
1404 b.next()
1405 return itertools.izip(a, b)
1406
1407# Create false dependencies so SCons will parse ISAs, establish
1408# dependencies, and setup the build Environments serially. Either
1409# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1410# greater than 1. It appears to be standard race condition stuff; it
1411# doesn't always fail, but usually, and the behaviors are different.
1412# Every time I tried to remove this, builds would fail in some
1413# creative new way. So, don't do that. You'll want to, though, because
1414# tests/SConscript takes a long time to make its Environments.
1415for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1416 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1417 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1418
1419# base help text
1420Help('''
1421Usage: scons [scons options] [build variables] [target(s)]
1422
1423Extra scons options:
1424%(options)s
1425
1426Global build variables:
1427%(global_vars)s
1428
1429%(local_vars)s
1430''' % help_texts)