summaryrefslogtreecommitdiff
path: root/tools/build/v2/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools/build/v2/tools')
-rw-r--r--tools/build/v2/tools/auto-index.jam4
-rw-r--r--tools/build/v2/tools/boostbook.jam3
-rw-r--r--tools/build/v2/tools/builtin.py131
-rw-r--r--tools/build/v2/tools/cast.jam2
-rw-r--r--tools/build/v2/tools/common.jam13
-rw-r--r--tools/build/v2/tools/common.py16
-rw-r--r--tools/build/v2/tools/cray.jam112
-rw-r--r--tools/build/v2/tools/darwin.jam27
-rw-r--r--tools/build/v2/tools/docutils.jam1
-rw-r--r--tools/build/v2/tools/gcc.jam21
-rw-r--r--tools/build/v2/tools/gcc.py14
-rw-r--r--tools/build/v2/tools/intel-win.jam4
-rw-r--r--tools/build/v2/tools/mc.py46
-rw-r--r--tools/build/v2/tools/midl.py134
-rw-r--r--tools/build/v2/tools/msvc.jam10
-rw-r--r--tools/build/v2/tools/msvc.py1198
-rw-r--r--tools/build/v2/tools/pch.py14
-rw-r--r--tools/build/v2/tools/python.jam2
-rw-r--r--tools/build/v2/tools/stage.jam2
-rw-r--r--tools/build/v2/tools/stage.py2
-rw-r--r--tools/build/v2/tools/types/__init__.py1
-rw-r--r--tools/build/v2/tools/types/cpp.py5
-rw-r--r--tools/build/v2/tools/types/preprocessed.py11
-rw-r--r--tools/build/v2/tools/unix.py8
24 files changed, 1664 insertions, 117 deletions
diff --git a/tools/build/v2/tools/auto-index.jam b/tools/build/v2/tools/auto-index.jam
index ebbf344eba..5c5c1d06cf 100644
--- a/tools/build/v2/tools/auto-index.jam
+++ b/tools/build/v2/tools/auto-index.jam
@@ -11,8 +11,8 @@ feature.feature auto-index : off "on" ;
feature.feature auto-index-internal : off "on" ;
feature.feature auto-index-verbose : off "on" ;
feature.feature auto-index-no-duplicates : off "on" ;
-feature.feature auto-index-script : : free ;
-feature.feature auto-index-prefix : : free ;
+feature.feature auto-index-script : : free path ;
+feature.feature auto-index-prefix : : free path ;
feature.feature auto-index-type : : free ;
feature.feature auto-index-section-names : "on" off ;
diff --git a/tools/build/v2/tools/boostbook.jam b/tools/build/v2/tools/boostbook.jam
index 3a5964c627..3ab0debdb0 100644
--- a/tools/build/v2/tools/boostbook.jam
+++ b/tools/build/v2/tools/boostbook.jam
@@ -336,6 +336,9 @@ rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
# Ubuntu Linux
docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ;
docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ;
+
+ # SUSE
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet/nwalsh : current ] ;
}
if $(docbook-xsl-dir)
diff --git a/tools/build/v2/tools/builtin.py b/tools/build/v2/tools/builtin.py
index 31a7bffeef..5b28a0aa78 100644
--- a/tools/build/v2/tools/builtin.py
+++ b/tools/build/v2/tools/builtin.py
@@ -144,11 +144,20 @@ def register_globals ():
feature.feature ('threading', ['single', 'multi'], ['propagated'])
feature.feature ('rtti', ['on', 'off'], ['propagated'])
feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
+
+ # Whether there is support for asynchronous EH (e.g. catching SEGVs).
+ feature.feature ('asynch-exceptions', ['on', 'off'], ['propagated'])
+
+ # Whether all extern "C" functions are considered nothrow by default.
+ feature.feature ('extern-c-nothrow', ['off', 'on'], ['propagated'])
+
feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
feature.feature ('define', [], ['free'])
+ feature.feature ('undef', [], ['free'])
feature.feature ('include', [], ['free', 'path']) #order-sensitive
feature.feature ('cflags', [], ['free'])
feature.feature ('cxxflags', [], ['free'])
+ feature.feature ('asmflags', [], ['free'])
feature.feature ('linkflags', [], ['free'])
feature.feature ('archiveflags', [], ['free'])
feature.feature ('version', [], ['free'])
@@ -309,10 +318,6 @@ def register_globals ():
variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
'<runtime-debugging>off', '<define>NDEBUG'])
variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
-
- type.register ('H', ['h'])
- type.register ('HPP', ['hpp'], 'H')
- type.register ('C', ['c'])
reset ()
@@ -348,8 +353,11 @@ class SearchedLibTarget (virtual_target.AbstractFileTarget):
class CScanner (scanner.Scanner):
def __init__ (self, includes):
scanner.Scanner.__init__ (self)
-
- self.includes_ = includes
+
+ self.includes_ = []
+
+ for i in includes:
+ self.includes_.extend(i.split("&&"))
def pattern (self):
return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
@@ -399,7 +407,7 @@ class LibGenerator (generators.Generator):
SHARED_LIB.
"""
- def __init__(self, id = 'LibGenerator', composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
+ def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, prop_set, sources):
@@ -432,7 +440,9 @@ class LibGenerator (generators.Generator):
def viable_source_types(self):
return ['*']
-generators.register(LibGenerator())
+generators.register(LibGenerator("builtin.lib-generator"))
+
+generators.override("builtin.prebuilt", "builtin.lib-generator")
def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
"""The implementation of the 'lib' rule. Beyond standard syntax that rule allows
@@ -508,22 +518,19 @@ class SearchedLibGenerator (generators.Generator):
generators.register (SearchedLibGenerator ())
-### class prebuilt-lib-generator : generator
-### {
-### rule __init__ ( * : * )
-### {
-### generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-### }
-###
-### rule run ( project name ? : prop_set : sources * : multiple ? )
-### {
-### local f = [ $(prop_set).get <file> ] ;
-### return $(f) $(sources) ;
-### }
-### }
-###
-### generators.register
-### [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
+class PrebuiltLibGenerator(generators.Generator):
+
+ def __init__(self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, properties, sources):
+ f = properties.get("file")
+ return f + sources
+
+generators.register(PrebuiltLibGenerator("builtin.prebuilt", False, [],
+ ["LIB"], ["<file>"]))
+
+generators.override("builtin.prebuilt", "builtin.lib-generator")
class CompileAction (virtual_target.Action):
@@ -565,9 +572,8 @@ class LinkingGenerator (generators.Generator):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run (self, project, name, prop_set, sources):
-
- lib_sources = prop_set.get('<library>')
- sources.extend(lib_sources)
+
+ sources.extend(prop_set.get('<library>'))
# Add <library-path> properties for all searched libraries
extra = []
@@ -576,38 +582,41 @@ class LinkingGenerator (generators.Generator):
search = s.search()
extra.extend(property.Property('<library-path>', sp) for sp in search)
- orig_xdll_path = []
-
- if prop_set.get('<hardcode-dll-paths>') == ['true'] \
- and type.is_derived(self.target_types_ [0], 'EXE'):
- xdll_path = prop_set.get('<xdll-path>')
- orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ]
- # It's possible that we have libraries in sources which did not came
- # from 'lib' target. For example, libraries which are specified
- # just as filenames as sources. We don't have xdll-path properties
- # for such target, but still need to add proper dll-path properties.
- for s in sources:
+ # It's possible that we have libraries in sources which did not came
+ # from 'lib' target. For example, libraries which are specified
+ # just as filenames as sources. We don't have xdll-path properties
+ # for such target, but still need to add proper dll-path properties.
+ extra_xdll_path = []
+ for s in sources:
if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
# Unfortunately, we don't have a good way to find the path
# to a file, so use this nasty approach.
p = s.project()
- location = path.root(s.name(), p.get('source-location'))
- xdll_path.append(path.parent(location))
-
- extra.extend(property.Property('<dll-path>', sp) for sp in xdll_path)
+ location = path.root(s.name(), p.get('source-location')[0])
+ extra_xdll_path.append(os.path.dirname(location))
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: do not need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, python extensions) can not
+ # hardcode paths to dependent libraries.
+ if prop_set.get('<hardcode-dll-paths>') == ['true'] \
+ and type.is_derived(self.target_types_ [0], 'EXE'):
+ xdll_path = prop_set.get('<xdll-path>')
+ extra.extend(property.Property('<dll-path>', sp) \
+ for sp in extra_xdll_path)
+ extra.extend(property.Property('<dll-path>', sp) \
+ for sp in xdll_path)
if extra:
- prop_set = prop_set.add_raw (extra)
-
+ prop_set = prop_set.add_raw (extra)
result = generators.Generator.run(self, project, name, prop_set, sources)
-
+
if result:
ur = self.extra_usage_requirements(result, prop_set)
- ur = ur.add(property_set.create(orig_xdll_path))
+ ur = ur.add(property_set.create(['<xdll-path>' + p for p in extra_xdll_path]))
else:
return None
-
- return(ur, result)
+ return (ur, result)
def extra_usage_requirements (self, created_targets, prop_set):
@@ -696,20 +705,18 @@ class ArchiveGenerator (generators.Generator):
return result
-### rule register-archiver ( id composing ? : source_types + : target_types + :
-### requirements * )
-### {
-### local g = [ new ArchiveGenerator $(id) $(composing) : $(source_types)
-### : $(target_types) : $(requirements) ] ;
-### generators.register $(g) ;
-### }
-###
-###
-### IMPORT $(__name__) : register-linker register-archiver
-### : : generators.register-linker generators.register-archiver ;
-###
-###
-###
+
+def register_archiver(id, source_types, target_types, requirements):
+ g = ArchiveGenerator(id, True, source_types, target_types, requirements)
+ generators.register(g)
+
+class DummyGenerator(generators.Generator):
+ """Generator that accepts everything and produces nothing. Useful as a general
+ fallback for toolset-specific actions like PCH generation.
+ """
+ def run (self, project, name, prop_set, sources):
+ return (property_set.empty(), [])
+
get_manager().projects().add_rule("variant", variant)
diff --git a/tools/build/v2/tools/cast.jam b/tools/build/v2/tools/cast.jam
index 6c84922f1f..211ce63296 100644
--- a/tools/build/v2/tools/cast.jam
+++ b/tools/build/v2/tools/cast.jam
@@ -11,7 +11,7 @@
#
# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
#
-# Boost.Build will assing target type CPP to both main.cpp and widget.cpp. Then,
+# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
# support will run the MOC tool as part of the build process.
#
diff --git a/tools/build/v2/tools/common.jam b/tools/build/v2/tools/common.jam
index df914d9d46..53e91b428f 100644
--- a/tools/build/v2/tools/common.jam
+++ b/tools/build/v2/tools/common.jam
@@ -334,7 +334,7 @@ rule get-absolute-tool-path ( command )
# first path found. Otherwise, returns an empty string. If 'path-last' is
# specified, PATH is searched after 'additional-paths'.
#
-local rule find-tool ( name : additional-paths * : path-last ? )
+rule find-tool ( name : additional-paths * : path-last ? )
{
local path = [ path.programs-path ] ;
local match = [ path.glob $(path) : $(name) $(name).exe ] ;
@@ -961,25 +961,22 @@ rule __test__ ( )
{
import assert ;
- local nl = "
-" ;
-
local save-os = [ modules.peek os : .name ] ;
modules.poke os : .name : LINUX ;
- assert.result "PATH=\"foo:bar:baz\"$(nl)export PATH$(nl)"
+ assert.result "PATH=\"foo:bar:baz\"\nexport PATH\n"
: path-variable-setting-command PATH : foo bar baz ;
- assert.result "PATH=\"foo:bar:$PATH\"$(nl)export PATH$(nl)"
+ assert.result "PATH=\"foo:bar:$PATH\"\nexport PATH\n"
: prepend-path-variable-command PATH : foo bar ;
modules.poke os : .name : NT ;
- assert.result "set PATH=foo;bar;baz$(nl)"
+ assert.result "set PATH=foo;bar;baz\n"
: path-variable-setting-command PATH : foo bar baz ;
- assert.result "set PATH=foo;bar;%PATH%$(nl)"
+ assert.result "set PATH=foo;bar;%PATH%\n"
: prepend-path-variable-command PATH : foo bar ;
modules.poke os : .name : $(save-os) ;
diff --git a/tools/build/v2/tools/common.py b/tools/build/v2/tools/common.py
index 612745b81f..3eb0f7d3f1 100644
--- a/tools/build/v2/tools/common.py
+++ b/tools/build/v2/tools/common.py
@@ -53,7 +53,7 @@ def reset ():
if OS == "NT":
# On Windows the case and capitalization of PATH is not always predictable, so
# let's find out what variable name was really set.
- for n in sys.environ:
+ for n in os.environ:
if n.lower() == "path":
__executable_path_variable = n
break
@@ -99,7 +99,7 @@ class Configurations(object):
def __init__(self):
self.used_ = set()
self.all_ = set()
- self.params = {}
+ self.params_ = {}
def register(self, id):
"""
@@ -113,7 +113,7 @@ class Configurations(object):
errors.error("common: the configuration '$(id)' is in use")
if id not in self.all_:
- self.all_ += [id]
+ self.all_.add(id)
# Indicate that a new configuration has been added.
return True
@@ -133,7 +133,7 @@ class Configurations(object):
errors.error("common: the configuration '$(id)' is not known")
if id not in self.used_:
- self.used_ += [id]
+ self.used_.add(id)
# indicate that the configuration has been marked as 'used'
return True
@@ -150,7 +150,7 @@ class Configurations(object):
def get(self, id, param):
""" Returns the value of a configuration parameter. """
- self.params_.getdefault(param, {}).getdefault(id, None)
+ return self.params_.get(param, {}).get(id)
def set (self, id, param, value):
""" Sets the value of a configuration parameter. """
@@ -294,6 +294,8 @@ def get_invocation_command_nodefault(
#print "warning: initialized from" [ errors.nearest-user-location ] ;
else:
command = check_tool(user_provided_command)
+ assert(isinstance(command, list))
+ command=' '.join(command)
if not command and __debug_configuration:
print "warning: toolset", toolset, "initialization:"
print "warning: can't find user-provided command", user_provided_command
@@ -347,7 +349,9 @@ def get_absolute_tool_path(command):
programs = path.programs_path()
m = path.glob(programs, [command, command + '.exe' ])
if not len(m):
- print "Could not find:", command, "in", programs
+ if __debug_configuration:
+ print "Could not find:", command, "in", programs
+ return None
return os.path.dirname(m[0])
# ported from trunk@47174
diff --git a/tools/build/v2/tools/cray.jam b/tools/build/v2/tools/cray.jam
new file mode 100644
index 0000000000..1d5271e972
--- /dev/null
+++ b/tools/build/v2/tools/cray.jam
@@ -0,0 +1,112 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2004, 2005 Markus Schoepflin.
+# Copyright 2011, John Maddock
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Cray C++ Compiler
+# See http://docs.cray.com/books/S-2179-50/html-S-2179-50/S-2179-50-toc.html
+#
+
+import feature generators common ;
+import toolset : flags ;
+
+feature.extend toolset : cray ;
+
+# Inherit from Unix toolset to get library ordering magic.
+toolset.inherit cray : unix ;
+
+generators.override cray.prebuilt : builtin.lib-generator ;
+generators.override cray.prebuilt : builtin.prebuilt ;
+generators.override cray.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters cray : version $(version) ] ;
+
+ local command = [ common.get-invocation-command cray : CC : $(command) ] ;
+
+ if $(command)
+ {
+ local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+
+ if $(root)
+ {
+ flags cray .root $(condition) : "\"$(root)\"/" ;
+ }
+ }
+ # If we can't find 'CC' anyway, at least show 'CC' in the commands
+ command ?= CC ;
+
+ common.handle-options cray : $(condition) : $(command) : $(options) ;
+}
+
+generators.register-c-compiler cray.compile.c++ : CPP : OBJ : <toolset>cray ;
+generators.register-c-compiler cray.compile.c : C : OBJ : <toolset>cray ;
+
+
+
+# No static linking as far as I can tell.
+# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
+flags cray.compile OPTIONS <debug-symbols>on : -Gn ;
+flags cray.link OPTIONS <debug-symbols>on : -Gn ;
+
+flags cray.compile OPTIONS <optimization>off : -O0 ;
+flags cray.compile OPTIONS <optimization>speed : -O3 ;
+flags cray.compile OPTIONS <optimization>space : -O1 ;
+
+flags cray.compile OPTIONS <cflags> ;
+flags cray.compile.c++ OPTIONS <cxxflags> ;
+flags cray.compile DEFINES <define> ;
+flags cray.compile INCLUDES <include> ;
+flags cray.link OPTIONS <linkflags> ;
+
+flags cray.link LIBPATH <library-path> ;
+flags cray.link LIBRARIES <library-file> ;
+flags cray.link FINDLIBS-ST <find-static-library> ;
+flags cray.link FINDLIBS-SA <find-shared-library> ;
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
+}
+
+# When creating dynamic libraries, we don't want to be warned about unresolved
+# symbols, therefore all unresolved symbols are marked as expected by
+# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
+# chain.
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -shared $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
+}
+
+
+# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
+# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
+# is the default, no special flag is needed.
+actions compile.c
+{
+ $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Note: The compiler is forced to compile the files as C++ (-x cxx) because
+# otherwise it will silently ignore files with no file extension.
+#
+# Note: We deliberately don't suppress any warnings on the compiler command
+# line, the user can always do this in a customized toolset later on.
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -c -h gnu $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Always create archive from scratch. See the gcc toolet for rationale.
+RM = [ common.rm-command ] ;
+actions together piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc $(<) $(>)
+}
diff --git a/tools/build/v2/tools/darwin.jam b/tools/build/v2/tools/darwin.jam
index 283dface94..dd6eacb114 100644
--- a/tools/build/v2/tools/darwin.jam
+++ b/tools/build/v2/tools/darwin.jam
@@ -231,7 +231,7 @@ local rule init-sdk ( condition * : root ? : version + : version-feature ? )
{
if $(.debug-configuration)
{
- ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(sdk) ;
+ ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(root) ;
}
# Add the version to the features for specifying them.
@@ -247,9 +247,9 @@ local rule init-sdk ( condition * : root ? : version + : version-feature ? )
# Set the flags the version needs to compile with, first
# generic options.
flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
- : -isysroot $(sdk) ;
+ : -isysroot $(root) ;
flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
- : -isysroot $(sdk) ;
+ : -isysroot $(root) ;
# Then device variation options.
switch $(version[1])
@@ -287,13 +287,28 @@ local rule init-sdk ( condition * : root ? : version + : version-feature ? )
}
}
- return $(version-feature) ;
+ if $(version[3]) > 0
+ {
+ # We have a minor version of an SDK. We want to set up
+ # previous minor versions, plus the current minor version.
+ # So we recurse to set up the previous minor versions, up to
+ # the current version.
+ local minor-minus-1 = [ CALC $(version[3]) - 1 ] ;
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-2]) $(minor-minus-1) : [ version-to-feature $(version[1-2]) $(minor-minus-1) ] ]
+ $(version-feature) ;
+ }
+ else
+ {
+ return $(version-feature) ;
+ }
}
else if $(version[4])
{
# We have a patch version of an SDK. We want to set up
# both the specific patch version, and the minor version.
- # So we recurse to set up the minor version. Plus the minor version.
+ # So we recurse to set up the patch version. Plus the minor version.
return
[ init-sdk $(condition) : $(root)
: $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
@@ -498,7 +513,7 @@ flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_i
flags darwin.compile OPTIONS <link>shared : -dynamic ;
# Misc options.
-flags darwin.compile OPTIONS : -no-cpp-precomp -gdwarf-2 -fexceptions ;
+flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ;
#~ flags darwin.link OPTIONS : -fexceptions ;
# Add the framework names to use.
diff --git a/tools/build/v2/tools/docutils.jam b/tools/build/v2/tools/docutils.jam
index bf0616174b..fc775b6fc3 100644
--- a/tools/build/v2/tools/docutils.jam
+++ b/tools/build/v2/tools/docutils.jam
@@ -56,6 +56,7 @@ rule init ( docutils-dir ? : tools-dir ? )
.setup = [
common.prepend-path-variable-command PYTHONPATH
: $(.docutils-dir) $(.docutils-dir)/extras ] ;
+ RST2XXX = [ common.find-tool rst2html ] ;
}
}
diff --git a/tools/build/v2/tools/gcc.jam b/tools/build/v2/tools/gcc.jam
index f7b0da542e..ee3aae128a 100644
--- a/tools/build/v2/tools/gcc.jam
+++ b/tools/build/v2/tools/gcc.jam
@@ -446,13 +446,17 @@ rule setup-address-model ( targets * : sources * : properties * )
}
else
{
- if $(model) = 32
- {
- option = -m32 ;
- }
- else if $(model) = 64
+ local arch = [ feature.get-values architecture : $(properties) ] ;
+ if $(arch) != arm
{
- option = -m64 ;
+ if $(model) = 32
+ {
+ option = -m32 ;
+ }
+ else if $(model) = 64
+ {
+ option = -m64 ;
+ }
}
# For darwin, the model can be 32_64. darwin.jam will handle that
# on its own.
@@ -1097,9 +1101,10 @@ local rule cpu-flags ( toolset variable : architecture : instruction-set + : val
#
# x86 and compatible
# The 'native' option appeared in gcc 4.2 so we cannot safely use it
-# as default. Use conservative i386 instead.
+# as default. Use conservative i386 instead for 32-bit.
+toolset.flags gcc OPTIONS <architecture>x86/<address-model>32/<instruction-set> : -march=i386 ;
cpu-flags gcc OPTIONS : x86 : native : -march=native ;
-cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 : default ;
+cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 ;
cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
diff --git a/tools/build/v2/tools/gcc.py b/tools/build/v2/tools/gcc.py
index 2a3e675ef1..b0aba1d293 100644
--- a/tools/build/v2/tools/gcc.py
+++ b/tools/build/v2/tools/gcc.py
@@ -23,13 +23,16 @@ import re
import bjam
from b2.tools import unix, common, rc, pch, builtin
-from b2.build import feature, type, toolset, generators
+from b2.build import feature, type, toolset, generators, property_set
+from b2.build.property import Property
from b2.util.utility import os_name, on_windows
from b2.manager import get_manager
from b2.build.generators import Generator
from b2.build.toolset import flags
from b2.util.utility import to_seq
+
+
__debug = None
def debug():
@@ -222,12 +225,12 @@ class GccPchGenerator(pch.PchGenerator):
# Find the header in sources. Ignore any CPP sources.
header = None
for s in sources:
- if type.is_derived(s.type, 'H'):
+ if type.is_derived(s.type(), 'H'):
header = s
# Error handling: Base header file name should be the same as the base
# precompiled header name.
- header_name = header.name
+ header_name = header.name()
header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
if header_basename != name:
location = project.project_module
@@ -239,14 +242,15 @@ class GccPchGenerator(pch.PchGenerator):
# return result of base class and pch-file property as usage-requirements
# FIXME: what about multiple results from generator.run?
- return (property_set.create('<pch-file>' + pch_file[0], '<cflags>-Winvalid-pch'),
+ return (property_set.create([Property('pch-file', pch_file[0]),
+ Property('cflags', '-Winvalid-pch')]),
pch_file)
# Calls the base version specifying source's name as the name of the created
# target. As result, the PCH will be named whatever.hpp.gch, and not
# whatever.gch.
def generated_targets(self, sources, prop_set, project, name = None):
- name = sources[0].name
+ name = sources[0].name()
return Generator.generated_targets(self, sources,
prop_set, project, name)
diff --git a/tools/build/v2/tools/intel-win.jam b/tools/build/v2/tools/intel-win.jam
index 691b5dce98..c9adac0d96 100644
--- a/tools/build/v2/tools/intel-win.jam
+++ b/tools/build/v2/tools/intel-win.jam
@@ -161,8 +161,8 @@ rule init ( version ? : # the compiler version
if ! $(compatibility)
{
- # If there's no backend version, assume 7.1.
- compatibility = vc7.1 ;
+ # If there's no backend version, assume 10.
+ compatibility = vc10 ;
}
local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ;
diff --git a/tools/build/v2/tools/mc.py b/tools/build/v2/tools/mc.py
new file mode 100644
index 0000000000..c194acdff7
--- /dev/null
+++ b/tools/build/v2/tools/mc.py
@@ -0,0 +1,46 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Microsoft message compiler tool.
+# Notes:
+# - there's just message compiler tool, there's no tool for
+# extracting message strings from sources
+# - This file allows to use Microsoft message compiler
+# with any toolset. In msvc.jam, there's more specific
+# message compiling action.
+
+import bjam
+
+from b2.tools import common, rc
+from b2.build import generators, type
+from b2.build.toolset import flags
+from b2.build.feature import feature
+from b2.manager import get_manager
+
+def init():
+ pass
+
+type.register('MC', ['mc'])
+
+
+# Command line options
+feature('mc-input-encoding', ['ansi', 'unicode'], ['free'])
+feature('mc-output-encoding', ['unicode', 'ansi'], ['free'])
+feature('mc-set-customer-bit', ['no', 'yes'], ['free'])
+
+flags('mc.compile', 'MCFLAGS', ['<mc-input-encoding>ansi'], ['-a'])
+flags('mc.compile', 'MCFLAGS', ['<mc-input-encoding>unicode'], ['-u'])
+flags('mc.compile', 'MCFLAGS', ['<mc-output-encoding>ansi'], '-A')
+flags('mc.compile', 'MCFLAGS', ['<mc-output-encoding>unicode'], ['-U'])
+flags('mc.compile', 'MCFLAGS', ['<mc-set-customer-bit>no'], [])
+flags('mc.compile', 'MCFLAGS', ['<mc-set-customer-bit>yes'], ['-c'])
+
+generators.register_standard('mc.compile', ['MC'], ['H', 'RC'])
+
+get_manager().engine().register_action(
+ 'mc.compile',
+ 'mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"')
diff --git a/tools/build/v2/tools/midl.py b/tools/build/v2/tools/midl.py
new file mode 100644
index 0000000000..45811d16bc
--- /dev/null
+++ b/tools/build/v2/tools/midl.py
@@ -0,0 +1,134 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Microsoft Interface Definition Language (MIDL) related routines
+from b2.build import scanner, type
+from b2.build.toolset import flags
+from b2.build.feature import feature
+from b2.manager import get_manager
+from b2.tools import builtin, common
+from b2.util import regex
+
+def init():
+ pass
+
+type.register('IDL', ['idl'])
+
+# A type library (.tlb) is generated by MIDL compiler and can be included
+# to resources of an application (.rc). In order to be found by a resource
+# compiler its target type should be derived from 'H' - otherwise
+# the property '<implicit-dependency>' will be ignored.
+type.register('MSTYPELIB', 'tlb', 'H')
+
+# Register scanner for MIDL files
+class MidlScanner(scanner.Scanner):
+ def __init__ (self, includes=[]):
+ scanner.Scanner.__init__(self)
+ self.includes = includes
+
+ # List of quoted strings
+ re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
+
+ # 'import' and 'importlib' directives
+ self.re_import = "import" + re_strings + "[ \t]*;" ;
+ self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;" ;
+
+ # C preprocessor 'include' directive
+ self.re_include_angle = "#[ \t]*include[ \t]*<(.*)>" ;
+ self.re_include_quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
+
+ def pattern():
+ # Match '#include', 'import' and 'importlib' directives
+ return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)"
+
+ def process(self, target, matches, binding):
+ included_angle = regex.transform(matches, self.re_include_angle)
+ included_quoted = regex.transform(matches, self.re_include_quoted)
+ imported = regex.transform(matches, self.re_import, [1, 3])
+ imported_tlbs = regex.transform(matches, self.re_importlib, [1, 3])
+
+ # CONSIDER: the new scoping rule seem to defeat "on target" variables.
+ g = bjam.call('get-target-variable', target, 'HDRGRIST')
+ b = os.path.normalize_path(os.path.dirname(binding))
+
+ # Attach binding of including file to included targets.
+ # When target is directly created from virtual target
+ # this extra information is unnecessary. But in other
+ # cases, it allows to distinguish between two headers of the
+ # same name included from different places.
+ g2 = g + "#" + b
+
+ g = "<" + g + ">"
+ g2 = "<" + g2 + ">"
+
+ included_angle = [ g + x for x in included_angle ]
+ included_quoted = [ g + x for x in included_quoted ]
+ imported = [ g + x for x in imported ]
+ imported_tlbs = [ g + x for x in imported_tlbs ]
+
+ all = included_angle + included_quoted + imported
+
+ bjam.call('INCLUDES', [target], all)
+ bjam.call('DEPENDS', [target], imported_tlbs)
+ bjam.call('NOCARE', all + imported_tlbs)
+ engine.set_target_variable(included_angle , 'SEARCH', ungrist(self.includes))
+ engine.set_target_variable(included_quoted, 'SEARCH', b + ungrist(self.includes))
+ engine.set_target_variable(imported , 'SEARCH', b + ungrist(self.includes))
+ engine.set_target_variable(imported_tlbs , 'SEARCH', b + ungrist(self.includes))
+
+ get_manager().scanners().propagate(type.get_scanner('CPP', PropertySet(self.includes)), included_angle + included_quoted)
+ get_manager().scanners().propagate(self, imported)
+
+scanner.register(MidlScanner, 'include')
+type.set_scanner('IDL', MidlScanner)
+
+
+# Command line options
+feature('midl-stubless-proxy', ['yes', 'no'], ['propagated'] )
+feature('midl-robust', ['yes', 'no'], ['propagated'] )
+
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>yes'], ['/Oicf' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>no' ], ['/Oic' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>yes' ], ['/robust' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>no' ], ['/no_robust'])
+
+# Architecture-specific options
+architecture_x86 = ['<architecture>' , '<architecture>x86']
+address_model_32 = ['<address-model>', '<address-model>32']
+address_model_64 = ['<address-model>', '<address-model>64']
+
+flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/' + m for ar in architecture_x86 for m in address_model_32 ], ['/win32'])
+flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/<address-model>64' for ar in architecture_x86], ['/x64'])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<architecture>ia64/' + m for m in address_model_64], ['/ia64'])
+
+flags('midl.compile.idl', 'DEFINES', [], ['<define>'])
+flags('midl.compile.idl', 'UNDEFS', [], ['<undef>'])
+flags('midl.compile.idl', 'INCLUDES', [], ['<include>'])
+
+
+builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], [])
+
+
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures
+# that both files will be created so bjam will not try to recreate them
+# constantly.
+get_manager().engine().register_action(
+ 'midl.compile.idl',
+ '''midl /nologo @"@($(<[1]:W).rsp:E=
+"$(>:W)"
+-D$(DEFINES)
+"-I$(INCLUDES)"
+-U$(UNDEFS)
+$(MIDLFLAGS)
+/tlb "$(<[1]:W)"
+/h "$(<[2]:W)"
+/iid "$(<[3]:W)"
+/proxy "$(<[4]:W)"
+/dlldata "$(<[5]:W)")"
+{touch} "$(<[4]:W)"
+{touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
diff --git a/tools/build/v2/tools/msvc.jam b/tools/build/v2/tools/msvc.jam
index e33a66d22b..22548323ad 100644
--- a/tools/build/v2/tools/msvc.jam
+++ b/tools/build/v2/tools/msvc.jam
@@ -736,7 +736,11 @@ local rule configure-really ( version ? : options * )
# version from the path.
# FIXME: We currently detect both Microsoft Visual Studio 9.0 and
# 9.0express as 9.0 here.
- if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
+ if [ MATCH "(Microsoft Visual Studio 11)" : $(command) ]
+ {
+ version = 11.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
{
version = 10.0 ;
}
@@ -1351,7 +1355,7 @@ if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
# Known toolset versions, in order of preference.
-.known-versions = 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ;
+.known-versions = 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ;
# Version aliases.
.version-alias-6 = 6.0 ;
@@ -1360,6 +1364,7 @@ if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
.version-alias-8 = 8.0 ;
.version-alias-9 = 9.0 ;
.version-alias-10 = 10.0 ;
+.version-alias-11 = 11.0 ;
# Names of registry keys containing the Visual C++ installation path (relative
# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
@@ -1372,6 +1377,7 @@ if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
+.version-11.0-reg = "VisualStudio\\11.0\\Setup\\VC" ;
# Visual C++ Toolkit 2003 does not store its installation path in the registry.
# The environment variable 'VCToolkitInstallDir' and the default installation
diff --git a/tools/build/v2/tools/msvc.py b/tools/build/v2/tools/msvc.py
new file mode 100644
index 0000000000..f4448daab4
--- /dev/null
+++ b/tools/build/v2/tools/msvc.py
@@ -0,0 +1,1198 @@
+# Copyright (c) 2003 David Abrahams.
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2006 Bojan Resnik.
+# Copyright (c) 2006 Ilya Sokolov.
+# Copyright (c) 2007 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+from os import environ
+import os.path
+import re
+import _winreg
+
+import bjam
+
+from b2.tools import common, rc, pch, builtin, mc, midl
+from b2.build import feature, type, toolset, generators, property_set
+from b2.build.property import Property
+from b2.util import path
+from b2.manager import get_manager
+from b2.build.generators import Generator
+from b2.build.toolset import flags
+from b2.util.utility import to_seq, on_windows
+from b2.tools.common import Configurations
+
+__debug = None
+
+def debug():
+    """Return True when bjam was invoked with --debug-configuration.
+
+    The answer is computed on first use and cached in the module-level
+    __debug flag thereafter.
+    """
+    global __debug
+    if __debug is None:
+        argv = bjam.variable("ARGV")
+        __debug = "--debug-configuration" in argv
+    return __debug
+
+
+# It is not yet clear what to do with Cygwin on python port.
+def on_cygwin():
+    # Always report a non-Cygwin host until Cygwin support is decided.
+    return False
+
+
+type.register('MANIFEST', ['manifest'])
+# Whether the generated manifest gets embedded into the final PE image.
+# (Removed a stray jam-style ';' statement terminator from the original.)
+feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated'])
+
+type.register('PDB',['pdb'])
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initialize a specific toolset version configuration. As the result, path to
+# compiler and, possible, program names are set up, and will be used when that
+# version of compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be ommited:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+# - default - this is an equivalent to an empty version.
+#
+# Depending on a supplied version, detected configurations and presence 'cl.exe'
+# in the path different results may be achieved. The following table describes
+# the possible scenarios:
+#
+# Nothing "x.y"
+# Passed Nothing "x.y" detected, detected,
+# version detected detected cl.exe in path cl.exe in path
+#
+# default Error Use "x.y" Create "default" Use "x.y"
+# all None Use all None Use all
+# x.y - Use "x.y" - Use "x.y"
+# a.b Error Error Create "a.b" Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+
+def init(version = None, command = None, options = None):
+    """Initialize the msvc toolset for 'version', optionally overriding the
+    compiler 'command' and passing extra configuration 'options'.
+    """
+    # When initialized from
+    #    using msvc : x.0 ;
+    # we get version as a single element list i.e. ['x.0'],
+    # but when specified from the command line we get a string i.e. 'x.0'.
+    # We want to work with a string, so unpack the list if needed.
+    is_single_element_list = (isinstance(version,list) and len(version) == 1)
+    assert(version is None or isinstance(version,str) or is_single_element_list)
+    if is_single_element_list:
+        version = version[0]
+
+    options = to_seq(options)
+    command = to_seq(command)
+
+    if command:
+        # 'command' is a list after to_seq() -- the original appended the
+        # list itself to a string, raising TypeError whenever a command was
+        # given. Record one <command> property per element instead.
+        options.extend("<command>" + cmd for cmd in command)
+    configure(version,options)
+
+def configure(version=None, options=None):
+    """Configure a specific msvc version, 'all' detected versions, or the
+    default version when no 'version' is given.
+    """
+    if version == "all":
+        if options:
+            raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version))
+
+        # Configure (i.e. mark as used) all registered versions.
+        all_versions = __versions.all()
+        if not all_versions:
+            if debug():
+                # Fixed the notice text: the concatenated fragments lacked
+                # separating spaces ("registeredmsvc", "currentlyregistered").
+                print "notice: [msvc-cfg] Asked to configure all registered " \
+                    "msvc toolset versions when there are none currently " \
+                    "registered."
+        else:
+            for v in all_versions:
+                # Note that there is no need to skip already configured
+                # versions here as this will request configure-really rule
+                # to configure the version using default options which will
+                # in turn cause it to simply do nothing in case the version
+                # has already been configured.
+                configure_really(v)
+    elif version == "default":
+        configure_really(None,options)
+    else:
+        configure_really(version, options)
+
+def extend_conditions(conditions,exts):
+    """Return every condition in 'conditions' extended with every element of
+    'exts', joined with the '/' property separator.
+    """
+    extended = []
+    for condition in conditions:
+        for ext in exts:
+            extended.append(condition + '/' + ext)
+    return extended
+
+def configure_version_specific(toolset_arg, version, conditions):
+    """Register version-specific compiler and linker flags for the toolset.
+
+    toolset_arg -- name the flags are registered under (e.g. 'msvc').
+    version     -- msvc version string (e.g. '9.0'); selects the option set.
+    conditions  -- property conditions restricting when the flags apply.
+    """
+    # Starting with versions 7.0, the msvc compiler have the /Zc:forScope and
+    # /Zc:wchar_t options that improve C++ standard conformance, but those
+    # options are off by default. If we are sure that the msvc version is at
+    # 7.*, add those options explicitly. We can be sure either if user specified
+    # version 7.* explicitly or if we auto-detected the version ourselves.
+    if not re.match('^6\\.', version):
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',conditions, ['/Zc:forScope','/Zc:wchar_t'])
+        toolset.flags('{}.compile.c++'.format(toolset_arg), 'C++FLAGS',conditions, ['/wd4675'])
+
+    # Explicitly disable the 'function is deprecated' warning. Some msvc
+    # versions have a bug, causing them to emit the deprecation warning even
+    # with /W0.
+    toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>off']), ['/wd4996'])
+    if re.match('^[78]\\.', version):
+        # 64-bit compatibility warning deprecated since 9.0, see
+        # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>all']), ['/Wp64'])
+
+    #
+    # Processor-specific optimization.
+    #
+    if re.match('^[67]', version ):
+        # 8.0 deprecates some of the options.
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed','<optimization>space']), ['/Ogiy', '/Gs'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed']), ['/Ot'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>space']), ['/Os'])
+
+        # /GB..G7 pick the blend/386/486/P5/P6/P7 instruction-set targets.
+        cpu_arch_i386_cond = extend_conditions(conditions, __cpu_arch_i386)
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>']),['/GB'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i386']),['/G3'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i486']),['/G4'])
+
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g5]), ['/G5'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g6]), ['/G6'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g7]), ['/G7'])
+
+        # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+        # tests will fail.
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', conditions, ['/Op'])
+
+        # 7.1 and below have single-threaded static RTL.
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/ML'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MLd'])
+    else:
+        # 8.0 and above adds some more options.
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' for a in __cpu_arch_amd64]), ['/favor:blend'])
+
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_em64t]), ['/favor:EM64T'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_amd64]), ['/favor:AMD64'])
+
+        # 8.0 and above only has multi-threaded static RTL.
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/MT'])
+        toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MTd'])
+
+    # Specify target machine type so the linker will not need to guess.
+    toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64'])
+    toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86'])
+    toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64'])
+
+    # Make sure that manifest will be generated even if there is no
+    # dependencies to put there.
+    toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions,["<embed-manifest>off"]), ['/MANIFEST'])
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+
+def register_toolset():
+    """Register the msvc toolset, including all of its flags, features and
+    generators. Repeated calls are no-ops.
+    """
+    if 'msvc' not in feature.values('toolset'):
+        register_toolset_really()
+
+
+engine = get_manager().engine()
+
+# this rule sets up the pdb file that will be used when generating static
+# libraries and the debug-store option is database, so that the compiler
+# puts all debug info into a single .pdb file named after the library
+#
+# Poking at source targets this way is probably not clean, but it's the
+# easiest approach.
+def archive(targets, sources=None, properties=None):
+    # Derive the PDB name from the first target, e.g. 'foo.lib' -> 'foo.pdb'.
+    bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
+
+# Declare action for creating static libraries. If library exists, remove it
+# before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
+if not on_cygwin():
+    # Native Windows shell: use 'if exist ... DEL' to remove a stale archive.
+    engine.register_action(
+        'msvc.archive',
+        '''if exist "$(<[1])" DEL "$(<[1])"
+    $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+        function=archive)
+else:
+    # Cygwin shell: use the platform's rm command instead.
+    engine.register_action(
+        'msvc.archive',
+        '''{rm} "$(<[1])"
+    $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()),
+        function=archive)
+
+# For the assembler the following options are turned on by default:
+#
+#   -Zp4   align structures to 4 bytes
+#   -Cp    preserve case of user identifiers
+#   -Cx    preserve case in publics, externs
+#
+engine.register_action(
+    'msvc.compile.asm',
+    '$(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"' )
+
+
+# Equivalent to [ on $(target) return $(prefix)$(var)$(suffix) ]. Note that $(var) can be a list.
+def expand_target_variable(target,var,prefix=None,suffix=None):
+    """Return the values of target variable 'var' on 'target', each wrapped
+    in 'prefix'/'suffix' and joined into one space-separated string.
+    """
+    # Renamed the local from 'list' -- it shadowed the builtin of that name.
+    values = bjam.call( 'get-target-variable', target, var )
+    pre = "" if prefix is None else prefix
+    suf = "" if suffix is None else suffix
+    return " ".join([ pre + elem + suf for elem in values ])
+
+
+# Action string for compiling the precompiled header when no dedicated PCH
+# source file exists: a tiny .cpp that merely #includes the header is
+# generated on the fly and compiled.
+compile_c_cpp_pch = '''$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)'''
+# Action for running the C/C++ compiler using precompiled headers. An already
+# built source file for compiling the precompiled headers is expected to be
+# given as one of the source parameters.
+compile_c_cpp_pch_s = '''$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)'''
+
+def get_rspline(targets, lang_opt):
+    """Assemble the common compiler response-file contents for 'targets' and
+    store the result in their CC_RSPLINE target variable.
+
+    lang_opt -- language selector option ('-TC' for C, '-TP' for C++).
+    """
+    parts = [
+        lang_opt,
+        expand_target_variable(targets, 'UNDEFS', '-U' ),
+        expand_target_variable(targets, 'CFLAGS' ),
+        expand_target_variable(targets, 'C++FLAGS' ),
+        expand_target_variable(targets, 'OPTIONS' ),
+        '-c',
+        expand_target_variable(targets, 'DEFINES', '\n-D' ),
+        expand_target_variable(targets, 'INCLUDES', '\n"-I', '"' ),
+    ]
+    bjam.call('set-target-variable', targets, 'CC_RSPLINE', ' '.join(parts))
+
+def compile_c(targets, sources = [], properties = None):
+    """Prepare state and PCH dependencies for the msvc.compile.c action."""
+    get_manager().engine().set_target_variable( targets[1], 'C++FLAGS', '' )
+    get_rspline(targets, '-TC')
+    # Rebind instead of '+=' so the shared mutable default list is never
+    # modified in place (the old in-place update leaked state across calls).
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_FILE')
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_HEADER')
+    compile_c_cpp(targets,sources)
+
+def compile_c_preprocess(targets, sources = [], properties = None):
+    """Prepare state and PCH dependencies for the msvc.preprocess.c action."""
+    # Fixed NameError: the parameter is 'targets', the original read 'target'.
+    get_manager().engine().set_target_variable( targets[1], 'C++FLAGS', '' )
+    get_rspline(targets, '-TC')
+    # Rebind instead of '+=' so the shared mutable default list is not mutated.
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_FILE')
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_HEADER')
+    preprocess_c_cpp(targets,sources)
+
+def compile_c_pch(targets, sources = [], properties = []):
+    """Select and set up the PCH compile action for C sources."""
+    # Fixed NameError: the parameter is 'targets', the original read 'target'.
+    get_manager().engine().set_target_variable( targets[1], 'C++FLAGS', '' )
+    get_rspline([targets[1]], '-TC')
+    get_rspline([targets[2]], '-TC')
+    pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE')
+    # Rebind instead of '+=' so the shared mutable default list is not mutated.
+    sources = sources + pch_source
+    if pch_source:
+        # A dedicated PCH source file exists -- use the '-s' action variant.
+        # NOTE(review): compile_c_cpp_pch_s above is bound to an action
+        # string; confirm a callable of the same name exists later in the
+        # file before relying on this call.
+        get_manager().engine().set_update_action('compile-c-c++-pch-s', targets, sources, properties)
+        get_manager().engine().add_dependency(targets,pch_source)
+        compile_c_cpp_pch_s(targets,sources)
+    else:
+        # No PCH source -- the action generates a temporary one.
+        get_manager().engine().set_update_action('compile-c-c++-pch', targets, sources, properties)
+        compile_c_cpp_pch(targets,sources)
+
+# YLOPTION is combined with the '__bjam_pch_symbol_' name inside the PCH
+# compile action strings to name the injected PCH symbol.
+toolset.flags( 'msvc', 'YLOPTION', [], ['-Yl'] )
+
+def compile_cpp(targets,sources=[],properties=None):
+    """Prepare state and PCH dependencies for the msvc.compile.c++ action."""
+    get_rspline(targets,'-TP')
+    # Rebind instead of '+=' so the shared mutable default list is not mutated.
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_FILE')
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_HEADER')
+    compile_c_cpp(targets,sources)
+
+def compile_cpp_preprocess(targets,sources=[],properties=None):
+    """Prepare state and PCH dependencies for the msvc.preprocess.c++ action."""
+    get_rspline(targets,'-TP')
+    # Rebind instead of '+=' so the shared mutable default list is not mutated.
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_FILE')
+    sources = sources + bjam.call('get-target-variable',targets,'PCH_HEADER')
+    preprocess_c_cpp(targets,sources)
+
+def compile_cpp_pch(targets,sources=[],properties=None):
+    """Select and set up the PCH compile action for C++ sources."""
+    get_rspline([targets[1]], '-TP')
+    get_rspline([targets[2]], '-TP')
+    pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE')
+    # Rebind instead of '+=' so the shared mutable default list is not mutated.
+    sources = sources + pch_source
+    if pch_source:
+        # A dedicated PCH source file exists -- use the '-s' action variant.
+        # NOTE(review): compile_c_cpp_pch_s above is bound to an action
+        # string; confirm a callable of the same name exists later in the
+        # file before relying on this call.
+        get_manager().engine().set_update_action('compile-c-c++-pch-s', targets, sources, properties)
+        get_manager().engine().add_dependency(targets,pch_source)
+        compile_c_cpp_pch_s(targets,sources)
+    else:
+        # No PCH source -- the action generates a temporary one.
+        get_manager().engine().set_update_action('compile-c-c++-pch', targets, sources, properties)
+        compile_c_cpp_pch(targets,sources)
+
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes this in action with intel-win
+#
+# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
+#
+# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++.
+# The linker will pull these into the executable's PDB
+#
+# 3. When compiling library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
+# as in this case the compiler must be used to create a single PDB for our library.
+#
+
+# Shared command line for the C and C++ compile actions (see the PDB notes
+# in the comment block above).
+compile_action = '$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)'
+engine.register_action(
+    'msvc.compile.c',
+    compile_action,
+    function=compile_c,
+    bound_list=['PDB_NAME'])
+
+engine.register_action(
+    'msvc.compile.c++',
+    compile_action,
+    function=compile_cpp,
+    bound_list=['PDB_NAME'])
+
+
+# Preprocess-only variant: '-E' writes preprocessor output to the target.
+preprocess_action = '$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"'
+
+engine.register_action(
+    'msvc.preprocess.c',
+    preprocess_action,
+    function=compile_c_preprocess,
+    bound_list=['PDB_NAME'])
+
+engine.register_action(
+    'msvc.preprocess.c++',
+    preprocess_action,
+    function=compile_cpp_preprocess,
+    bound_list=['PDB_NAME'])
+
+def compile_c_cpp(targets,sources=None):
+    # Common helper for the compile/preprocess setup functions: registers
+    # PCH dependencies and derives the per-source PDB name from the first
+    # target (e.g. 'foo.obj' -> 'foo.pdb').
+    pch_header = bjam.call('get-target-variable',targets[0],'PCH_HEADER')
+    pch_file = bjam.call('get-target-variable',targets[0],'PCH_FILE')
+    if pch_header: get_manager().engine().add_dependency(targets[0],pch_header)
+    if pch_file: get_manager().engine().add_dependency(targets[0],pch_file)
+    bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
+
+def preprocess_c_cpp(targets,sources=None):
+    #same as above
+    return compile_c_cpp(targets,sources)
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+
+# The concrete command string for the PCH actions is chosen at update time
+# by compile_c_pch()/compile_cpp_pch() via set_update_action, hence 'None'.
+engine.register_action(
+    'msvc.compile.c.pch',
+    None, # action set by the function
+    function=compile_c_pch)
+
+engine.register_action(
+    'msvc.compile.c++.pch',
+    None, # action set by the function
+    function=compile_cpp_pch)
+
+
+# See midl.py for details.
+#
+engine.register_action(
+    'msvc.compile.idl',
+    '''$(.IDL) /nologo @"@($(<[1]:W).rsp:E=
+"$(>:W)"
+-D$(DEFINES)
+"-I$(INCLUDES:W)"
+-U$(UNDEFS)
+$(MIDLFLAGS)
+/tlb "$(<[1]:W)"
+/h "$(<[2]:W)"
+/iid "$(<[3]:W)"
+/proxy "$(<[4]:W)"
+/dlldata "$(<[5]:W)")"
+    {touch} "$(<[4]:W)"
+    {touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
+
+# Message-text compiler: emits a header and a resource script.
+engine.register_action(
+    'msvc.compile.mc',
+    '$(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"')
+
+# Resource compiler invocation (-l 0x409 selects the US-English locale id).
+engine.register_action(
+    'msvc.compile.rc',
+    '$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"')
+
+def link_dll(targets,sources=None,properties=None):
+    # Make the DLL depend on its module-definition file (if any) before
+    # running the common manifest handling.
+    get_manager().engine().add_dependency(targets,bjam.call('get-target-variable',targets,'DEF_FILE'))
+    manifest(targets, sources, properties)
+
+def manifest(targets,sources=None,properties=None):
+    # Switch to the manifest-embedding update action when <embed-manifest>on
+    # is in effect.
+    # NOTE(review): assumes 'properties' exposes .get('<embed-manifest>')
+    # returning a sequence -- confirm what object the engine passes here.
+    if 'on' in properties.get('<embed-manifest>'):
+        get_manager().engine().set_update_action('msvc.manifest', targets, sources, properties)
+
+
+# Incremental linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# Windows manifest is a new way to specify dependencies on managed DotNet
+# assemblies and Windows native DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if not on_cygwin():
+    # Native Windows: cmd.exe syntax, propagate the linker's exit code.
+    engine.register_action(
+        'msvc.link',
+        '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
+        function=manifest,
+        bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+    engine.register_action(
+        'msvc.manifest',
+        '''if exist "$(<[1]).manifest" (
+            $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+        )''')
+
+    engine.register_action(
+        'msvc.link.dll',
+        '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
+        function=link_dll,
+        bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+    engine.register_action(
+        'msvc.manifest.dll',
+        '''if exist "$(<[1]).manifest" (
+            $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+        )''')
+else:
+    # Cygwin: same linker invocations but POSIX shell syntax for the checks.
+    engine.register_action(
+        'msvc.link',
+        '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+        function=manifest,
+        bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+    engine.register_action(
+        'msvc.manifest',
+        '''if test -e "$(<[1]).manifest"; then
+            $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+        fi''')
+
+    engine.register_action(
+        'msvc.link.dll',
+        '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+        function=link_dll,
+        bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+    engine.register_action(
+        'msvc.manifest.dll',
+        '''if test -e "$(<[1]).manifest"; then
+            $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+        fi''')
+
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class MsvcPchGenerator(pch.PchGenerator):
+    """PCH generator for msvc: splits the sources into the PCH header and an
+    optional PCH source file and produces the precompiled header target.
+    """
+
+    # Inherit the __init__ method
+
+    def run_pch(self, project, name, prop_set, sources):
+        # Find the header in sources. Ignore any CPP sources.
+        pch_header = None
+        pch_source = None
+        for s in sources:
+            if type.is_derived(s.type(), 'H'):
+                pch_header = s
+            elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'):
+                pch_source = s
+
+        # Fixed: the original tested 'pch-header', which is not a valid
+        # Python identifier and raised NameError instead of this check.
+        if not pch_header:
+            raise RuntimeError( "can not build pch without pch-header" )
+
+        # If we do not have the PCH source - that is fine. We will just create a
+        # temporary .cpp file in the action.
+        temp_prop_set = property_set.create([Property('pch-source',pch_source)]+prop_set.all())
+        # Fixed: Generator.run is an instance method invoked through the
+        # class, so 'self' must be passed explicitly as the first argument.
+        generated = Generator.run(self,project,name,temp_prop_set,pch_header)
+        pch_file = None
+        for g in generated:
+            if type.is_derived(g.type(), 'PCH'):
+                pch_file = g
+        return property_set.create([Property('pch-header',pch_header),Property('pch-file',pch_file)]+generated)
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects versions listed as '_known_versions' by checking registry information,
+# environment variables & default paths. Supports both native Windows and
+# Cygwin.
+def auto_detect_toolset_versions():
+    if on_windows() or on_cygwin():
+        for version in _known_versions:
+            versionVarName = '__version_{}_reg'.format(version.replace('.','_'))
+            if versionVarName in globals():
+                vc_path = None
+                # Probe both the native and the WOW64 registry views.
+                for x in [ '', 'Wow6432Node\\' ]:
+                    try:
+                        with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\{}{}'.format(x, globals()[versionVarName])) as reg_key:
+                            vc_path = _winreg.QueryValueEx(reg_key, "ProductDir")[0]
+                    except OSError:
+                        # Key missing in this view -- best-effort probing only.
+                        # (Narrowed from a bare 'except:' that swallowed every
+                        # exception, including KeyboardInterrupt.)
+                        pass
+                if vc_path:
+                    vc_path = os.path.join(vc_path,'bin')
+                    register_configuration(version,os.path.normpath(vc_path))
+
+    for i in _known_versions:
+        if not i in __versions.all():
+            # NOTE(review): 'default_path' here vs 'default_paths' used in
+            # configure_really -- confirm which helper actually exists.
+            register_configuration(i,default_path(i))
+
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+
+def configure_really(version=None, options=[]):
+ v = version
+ if not v:
+ # Take the first registered (i.e. auto-detected) version.
+ version = __versions.first()
+ v = version
+
+ # Note: 'version' can still be empty at this point if no versions have
+ # been auto-detected.
+ if not version:
+ version = "default"
+
+ # Version alias -> real version number.
+ version = globals().get("__version_alias_{}".format(version), version)
+
+    # Check whether the selected configuration is already in use.
+    if version in __versions.used():
+        # Allow multiple 'toolset.using' calls for the same configuration if the
+        # identical sets of options are used.
+        if options and options != __versions.get(version,'options'):
+            # Fixed: the original used the jam-style '$(version)' placeholder
+            # with str.format(), so the version never appeared in the message.
+            raise RuntimeError("MSVC toolset configuration: Toolset version '{}' already configured.".format(version))
+    else:
+        # Register a new configuration.
+        __versions.register(version)
+
+ # Add user-supplied to auto-detected options.
+ version_opts = __versions.get(version, 'options')
+ if (version_opts):
+ options = version_opts + options
+
+ # Mark the configuration as 'used'.
+ __versions.use(version)
+ # Generate conditions and save them.
+ conditions = common.check_init_parameters('msvc', None, ('version', v))
+ __versions.set(version, 'conditions', conditions)
+ command = feature.get_values('<command>', options)
+
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
+ command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version))
+ common.handle_options('msvc', conditions, command, options)
+
+    if not version:
+        # Even if version is not explicitly specified, try to detect the
+        # version from the path.
+        # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+        # 9.0express as 9.0 here.
+        if re.search("Microsoft Visual Studio 11", command):
+            version = '11.0'
+        # 'elif' (not 'if') -- otherwise a just-detected 11.0 falls through
+        # the chain below and is clobbered to '6.0' by the final 'else'.
+        elif re.search("Microsoft Visual Studio 10", command):
+            version = '10.0'
+        elif re.search("Microsoft Visual Studio 9", command):
+            version = '9.0'
+        elif re.search("Microsoft Visual Studio 8", command):
+            version = '8.0'
+        elif re.search("NET 2003[\/\\]VC7", command):
+            version = '7.1'
+        elif re.search("Microsoft Visual C\\+\\+ Toolkit 2003", command):
+            version = '7.1toolkit'
+        elif re.search(".NET[\/\\]VC7", command):
+            version = '7.0'
+        else:
+            version = '6.0'
+
+ # Generate and register setup command.
+
+ below_8_0 = re.search("^[67]\\.",version) != None
+
+ if below_8_0:
+ cpu = ['i386']
+ else:
+ cpu = ['i386', 'amd64', 'ia64']
+
+ setup_scripts = {}
+
+ if command:
+ # TODO: Note that if we specify a non-existant toolset version then
+ # this rule may find and use a corresponding compiler executable
+ # belonging to an incorrect toolset version. For example, if you
+ # have only MSVC 7.1 installed, have its executable on the path and
+ # specify you want Boost Build to use MSVC 9.0, then you want Boost
+ # Build to report an error but this may cause it to silently use the
+ # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
+ # toolset version.
+ command = common.get_absolute_tool_path(command)
+
+ if command:
+ parent = os.path.dirname(os.path.normpath(command))
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+ # be used instead. Setup scripts may be global or arhitecture/
+ # /platform/cpu specific. Setup options are used only in case of
+ # global setup scripts.
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top level script but to
+ # support those we need to fall back to using the worker scripts
+ # directly in case the top level script can not be found.
+
+ global_setup = feature.get_values('<setup>',options)
+ if global_setup:
+ global_setup = global_setup[0]
+ else:
+ global_setup = None
+
+ if not below_8_0 and not global_setup:
+ global_setup = locate_default_setup(command,parent,'vcvarsall.bat')
+
+
+ default_setup = {
+ 'amd64' : 'vcvarsx86_amd64.bat',
+ 'i386' : 'vcvars32.bat',
+ 'ia64' : 'vcvarsx86_ia64.bat' }
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, that seems to be a documentation bug
+ # and x86_ia64 is the correct option.
+ default_global_setup_options = {
+ 'amd64' : 'x86_amd64',
+ 'i386' : 'x86',
+ 'ia64' : 'x86_ia64' }
+
+ somehow_detect_the_itanium_platform = None
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+ # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+ # 'x86' when running 32-bit Windows, no matter which processor is
+ # used, and 'AMD64' on 64-bit windows on x86 (either AMD64 or EM64T)
+ # Windows.
+ #
+ if re.search( 'AMD64', environ[ "PROCESSOR_ARCHITECTURE" ] ) != None:
+ default_global_setup_options[ 'amd64' ] = 'amd64'
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ elif somehow_detect_the_itanium_platform:
+ default_global_setup_options[ 'ia64' ] = 'ia64'
+
+ setup_prefix = "call "
+ setup_suffix = """ >nul\n"""
+ if on_cygwin():
+ setup_prefix = "cmd.exe /S /C call "
+ setup_suffix = " \">nul\" \"&&\" "
+
+ for c in cpu:
+ setup_options = None
+ setup_cpu = feature.get_values('<setup-{}>'.format(c),options)
+
+ if not setup_cpu:
+ if global_setup:
+ setup_cpu = global_setup
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup_options = feature.get_values('<setup-options-{}>'.format(c),options)
+ if not setup_options:
+ setup_options = default_global_setup_options[ c ]
+ else:
+ setup_cpu = locate_default_setup(command, parent, default_setup[ c ])
+
+ # Cygwin to Windows path translation.
+ # setup-$(c) = "\""$(setup-$(c):W)"\"" ;
+
+ # Append setup options to the setup name and add the final setup
+ # prefix & suffix.
+ setup_scripts[ c ] = '{}"{}" {}{}'.format(setup_prefix, setup_cpu, setup_options, setup_suffix)
+
+ # Get tool names (if any) and finish setup.
+ compiler = feature.get_values("<compiler>", options)
+ if not compiler:
+ compiler = "cl"
+
+ linker = feature.get_values("<linker>", options)
+ if not linker:
+ linker = "link"
+
+ resource_compiler = feature.get_values("<resource-compiler>", options)
+ if not resource_compiler:
+ resource_compiler = "rc"
+
+ # Turn on some options for i386 assembler
+ # -coff generate COFF format object file (compatible with cl.exe output)
+ default_assembler_amd64 = 'ml64'
+ default_assembler_i386 = 'ml -coff'
+ default_assembler_ia64 = 'ias'
+
+ assembler = feature.get_values('<assembler>',options)
+
+ idl_compiler = feature.get_values('<idl-compiler>',options)
+ if not idl_compiler:
+ idl_compiler = 'midl'
+
+ mc_compiler = feature.get_values('<mc-compiler>',options)
+ if not mc_compiler:
+ mc_compiler = 'mc'
+
+ manifest_tool = feature.get_values('<manifest-tool>',options)
+ if not manifest_tool:
+ manifest_tool = 'mt'
+
+ cc_filter = feature.get_values('<compiler-filter>',options)
+
+ for c in cpu:
+ cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ]
+
+ setup_script = setup_scripts.get(c, '')
+
+ if debug():
+ for cpu_condition in cpu_conditions:
+ print "notice: [msvc-cfg] condition: '{}', setup: '{}'".format(cpu_condition,setup_script)
+
+ cpu_assembler = assembler
+ if not cpu_assembler:
+ cpu_assembler = locals()['default_assembler_{}'.format(c)]
+
+ toolset.flags('msvc.compile', '.CC' , cpu_conditions, ['{}{} /Zm800 -nologo' .format(setup_script, compiler)])
+ toolset.flags('msvc.compile', '.RC' , cpu_conditions, ['{}{} -nologo' .format(setup_script, resource_compiler)])
+ toolset.flags('msvc.compile', '.ASM', cpu_conditions, ['{}{} ' .format(setup_script, cpu_assembler)])
+ toolset.flags('msvc.link' , '.LD' , cpu_conditions, ['{}{} /NOLOGO /INCREMENTAL:NO'.format(setup_script, linker)])
+ toolset.flags('msvc.archive', '.LD' , cpu_conditions, ['{}{} /lib /NOLOGO' .format(setup_script, linker)])
+ toolset.flags('msvc.compile', '.IDL', cpu_conditions, ['{}{} ' .format(setup_script, idl_compiler)])
+ toolset.flags('msvc.compile', '.MC' , cpu_conditions, ['{}{} ' .format(setup_script, mc_compiler)])
+ toolset.flags('msvc.link' , '.MT' , cpu_conditions, ['{}{} -nologo' .format(setup_script, manifest_tool)])
+
+ if cc_filter:
+ toolset.flags('msvc', '.CC.FILTER', cpu_conditions, ['"|" {}'.format(cc_filter)])
+
+ # Set version-specific flags.
+ configure_version_specific('msvc', version, conditions)
+
+
# Returns the default installation path for the given version.
#
def default_path(version):
    """Return the default installation path for the given msvc version.

    The path is located by trying, in order: the auto-detected <command>
    option registered for the version, the version-specific environment
    variable (e.g. 'VCToolkitInstallDir' for the free VC 7.1 toolkit), and
    finally the version's known default location under Program Files.
    Returns None (or an empty value) when nothing applies.
    """
    key = version.replace('.', '_')
    module_globals = globals()
    result = None

    # Use the auto-detected <command> path if possible.
    options = __versions.get(version, 'options')
    if options:
        result = feature.get_values('<command>', options)
    if result:
        result = os.path.dirname("".join(result))
    else:
        # Otherwise consult the environment variable specific to this
        # version, if one is declared and actually set.
        env_var_var_name = '__version_{}_env'.format(key)
        if env_var_var_name in module_globals:
            env_var_name = module_globals[env_var_var_name]
            if env_var_name in os.environ:
                vc_root = environ[env_var_name]
                if vc_root:
                    result = os.path.normpath(os.path.join(
                        vc_root, module_globals['__version_{}_envpath'.format(key)]))

    # Finally fall back to the version's default installation folder.
    var_name = '__version_{}_path'.format(key)
    if not result and var_name in module_globals:
        result = os.path.normpath(os.path.join(
            common.get_program_files_dir(), module_globals[var_name]))
    return result
+
+
# Returns either the default installation path (if 'version' is not empty) or
# list of all known default paths (if no version is given)
#
def default_paths(version = None):
    """Return the list of existing default installation paths.

    When 'version' is given, the list holds at most that single version's
    default path; otherwise the defaults of every known version are
    considered. Versions without a detectable path are left out.
    """
    versions_to_try = [version] if version else _known_versions
    candidates = (default_path(v) for v in versions_to_try)
    return [path for path in candidates if path]
+
+
class MsvcLinkingGenerator(builtin.LinkingGenerator):
    # Extends the base linking generator: besides the main linked target it
    # may register a paired PDB target (when debug symbols are enabled) and
    # an external .manifest target (when manifest embedding is disabled).
    def generated_targets(self, sources, prop_set, project, name):
        # Let the base class build the main target(s) first.
        result = builtin.LinkingGenerator.generated_targets(self, sources, prop_set, project, name)
        if result:
            name_main = result[0].name()
            action = result[0].action()

            # NOTE(review): property_set.get() typically yields a *list* of
            # values; confirm these direct string comparisons ever match --
            # "'on' in prop_set.get(...)" may be what is intended.
            if prop_set.get('<debug-symbols>') == 'on':
                # We force exact name on PDB. The reason is tagging -- the tag rule may
                # reasonably special case some target types, like SHARED_LIB. The tag rule
                # will not catch PDB, and it cannot even easily figure if PDB is paired with
                # SHARED_LIB or EXE or something else. Because PDB always get the
                # same name as the main target, with .pdb as extension, just force it.
                target = FileTarget(name_main.split_ext()[0]+'.pdb','PDB',project,action,True)
                registered_target = virtual_target.register(target)
                if target != registered_target:
                    action.replace_targets(target,registered_target)
                result.append(registered_target)
            if prop_set.get('<embed-manifest>') == 'off':
                # The manifest target gets '.manifest' appended to the full name
                # of the main target, including its extension (e.g.
                # a.exe.manifest), which is why the 'exact' naming mode is used
                # here as well.
                target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True)
                registered_target = virtual_target.register(target)
                if target != registered_target:
                    action.replace_targets(target,registered_target)
                result.append(registered_target)
        return result
+
+
# Unsafe worker rule for the register-toolset() rule. Must not be called
# multiple times.

def register_toolset_really():
    """Perform the actual one-time msvc toolset registration: declare the
    toolset and its features, register its generators and declare all
    feature-conditional flags. The registration/override ordering below is
    significant and must not be rearranged.
    """
    feature.extend('toolset', ['msvc'])

    # Intel and msvc supposedly have link-compatible objects.
    feature.subfeature( 'toolset', 'msvc', 'vendor', 'intel', ['propagated', 'optional'])

    # Inherit MIDL flags.
    toolset.inherit_flags('msvc', 'midl')

    # Inherit MC flags.
    toolset.inherit_flags('msvc','mc')

    # Dynamic runtime comes only in MT flavour.
    toolset.add_requirements(['<toolset>msvc,<runtime-link>shared:<threading>multi'])

    # Declare msvc toolset specific features.
    feature.feature('debug-store', ['object', 'database'], ['propagated'])
    feature.feature('pch-source', [], ['dependency', 'free'])

    # Declare generators.

    # TODO: Is it possible to combine these? Make the generators
    # non-composing so that they do not convert each source into a separate
    # .rsp file.
    generators.register(MsvcLinkingGenerator('msvc.link', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['EXE'], ['<toolset>msvc']))
    generators.register(MsvcLinkingGenerator('msvc.link.dll', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['SHARED_LIB','IMPORT_LIB'], ['<toolset>msvc']))

    builtin.register_archiver('msvc.archive', ['OBJ'], ['STATIC_LIB'], ['<toolset>msvc'])
    builtin.register_c_compiler('msvc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>msvc'])
    builtin.register_c_compiler('msvc.compile.c', ['C'], ['OBJ'], ['<toolset>msvc'])
    builtin.register_c_compiler('msvc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['<toolset>msvc'])
    builtin.register_c_compiler('msvc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['<toolset>msvc'])

    # Using 'register-c-compiler' adds the build directory to INCLUDES.
    builtin.register_c_compiler('msvc.compile.rc', ['RC'], ['OBJ(%_res)'], ['<toolset>msvc'])
    generators.override('msvc.compile.rc', 'rc.compile.resource')
    generators.register_standard('msvc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>msvc'])

    builtin.register_c_compiler('msvc.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], ['<toolset>msvc'])
    generators.override('msvc.compile.idl', 'midl.compile.idl')

    generators.register_standard('msvc.compile.mc', ['MC'], ['H','RC'], ['<toolset>msvc'])
    generators.override('msvc.compile.mc', 'mc.compile')

    # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
    # the latter have their HPP type derived from H. The type of compilation
    # is determined entirely by the destination type.
    generators.register(MsvcPchGenerator('msvc.compile.c.pch', False, ['H'], ['C_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
    generators.register(MsvcPchGenerator('msvc.compile.c++.pch', False, ['H'], ['CPP_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))

    generators.override('msvc.compile.c.pch', 'pch.default-c-pch-generator')
    generators.override('msvc.compile.c++.pch', 'pch.default-cpp-pch-generator')

    # Precompiled-header related variables, active only with <pch>on.
    toolset.flags('msvc.compile', 'PCH_FILE' , ['<pch>on'], ['<pch-file>' ])
    toolset.flags('msvc.compile', 'PCH_SOURCE', ['<pch>on'], ['<pch-source>'])
    toolset.flags('msvc.compile', 'PCH_HEADER', ['<pch>on'], ['<pch-header>'])

    #
    # Declare flags for compilation.
    #
    toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>speed'], ['/O2'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>space'], ['/O1'])

    # Itanium CPU tuning options, keyed on architecture/instruction-set.
    toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium ], ['/G1'])
    toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium2 ], ['/G2'])

    toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>object'], ['/Z7'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>database'], ['/Zi'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>off'], ['/Od'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>off'], ['/Ob0'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>on'], ['/Ob1'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>full'], ['/Ob2'])

    toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>on'], ['/W3'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>off'], ['/W0'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>all'], ['/W4'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<warnings-as-errors>on'], ['/WX'])

    # Exception handling model, chosen by the combination of three features.
    toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off'], ['/EHs'])
    toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on'], ['/EHsc'])
    toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off'], ['/EHa'])
    toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on'], ['/EHac'])

    # By default 8.0 enables rtti support while prior versions disabled it. We
    # simply enable or disable it explicitly so we do not have to depend on this
    # default behaviour.
    toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>on'], ['/GR'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>off'], ['/GR-'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>shared'], ['/MD'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>shared'], ['/MDd'])

    toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>static/<threading>multi'], ['/MT'])
    toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>static/<threading>multi'], ['/MTd'])

    toolset.flags('msvc.compile', 'OPTIONS', [], ['<cflags>'])
    toolset.flags('msvc.compile.c++', 'OPTIONS', [], ['<cxxflags>'])

    toolset.flags('msvc.compile', 'PDB_CFLAG', ['<debug-symbols>on/<debug-store>database'],['/Fd'])

    toolset.flags('msvc.compile', 'DEFINES', [], ['<define>'])
    toolset.flags('msvc.compile', 'UNDEFS', [], ['<undef>'])
    toolset.flags('msvc.compile', 'INCLUDES', [], ['<include>'])

    # Declare flags for the assembler.
    toolset.flags('msvc.compile.asm', 'USER_ASMFLAGS', [], ['<asmflags>'])

    toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<debug-symbols>on'], ['/Zi', '/Zd'])

    toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>on'], ['/W3'])
    toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>off'], ['/W0'])
    toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>all'], ['/W4'])
    toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings-as-errors>on'], ['/WX'])

    toolset.flags('msvc.compile.asm', 'DEFINES', [], ['<define>'])

    # Declare flags for linking.
    toolset.flags('msvc.link', 'PDB_LINKFLAG', ['<debug-symbols>on/<debug-store>database'], ['/PDB']) # not used yet
    toolset.flags('msvc.link', 'LINKFLAGS', ['<debug-symbols>on'], ['/DEBUG'])
    toolset.flags('msvc.link', 'DEF_FILE', [], ['<def-file>'])

    # The linker disables the default optimizations when using /DEBUG so we
    # have to enable them manually for release builds with debug symbols.
    toolset.flags('msvc', 'LINKFLAGS', ['<debug-symbols>on/<runtime-debugging>off'], ['/OPT:REF,ICF'])

    toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>console'], ['/subsystem:console'])
    toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>gui'], ['/subsystem:windows'])
    toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>wince'], ['/subsystem:windowsce'])
    toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>native'], ['/subsystem:native'])
    toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>auto'], ['/subsystem:posix'])

    toolset.flags('msvc.link', 'OPTIONS', [], ['<linkflags>'])
    toolset.flags('msvc.link', 'LINKPATH', [], ['<library-path>'])

    # NOTE(review): the following four calls pass only three arguments,
    # unlike the four-argument calls above -- presumably relying on the
    # jam-style free-feature form of toolset.flags() where the condition
    # slot carries the values; confirm the Python port supports this.
    toolset.flags('msvc.link', 'FINDLIBS_ST', ['<find-static-library>'])
    toolset.flags('msvc.link', 'FINDLIBS_SA', ['<find-shared-library>'])
    toolset.flags('msvc.link', 'LIBRARY_OPTION', ['<toolset>msvc'])
    toolset.flags('msvc.link', 'LIBRARIES_MENTIONED_BY_FILE', ['<library-file>'])

    toolset.flags('msvc.archive', 'AROPTIONS', [], ['<archiveflags>'])
+
+
# Locates the requested setup script in the given folders and returns its
# full path, or None when the script cannot be found. In case multiple
# candidate scripts exist only the first one is returned.
#
# TODO: There used to exist a code comment for the msvc.init rule stating that
# we do not correctly detect the location of the vcvars32.bat setup script for
# the free VC7.1 tools in case user explicitly provides a path. This should be
# tested or simply remove this whole comment in case this toolset version is no
# longer important.
#
def locate_default_setup(command, parent, setup_name):
    """Return the full path of 'setup_name' under 'command' or 'parent'
    (checked in that order), or None when neither location contains it."""
    for folder in (command, parent):
        candidate = os.path.join(folder, setup_name)
        if os.path.exists(candidate):
            return candidate
    return None
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+def register_configuration(version, path=None):
+ if path:
+ command = os.path.join(path, 'cl.exe')
+ if os.path.exists(command):
+ if debug():
+ print "notice: [msvc-cfg] msvc-$(version) detected, command: ''".format(version,command)
+ __versions.register(version)
+ __versions.set(version,'options',['<command>{}'.format(command)])
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
# Similar to Configurations, but remembers the first registered configuration.
class MSVCConfigurations(Configurations):
    """Configuration registry that additionally remembers the first
    configuration ever registered (used as the default version)."""
    def __init__(self):
        Configurations.__init__(self)
        # Id of the first configuration registered, or None when empty.
        self.first_ = None

    def register(self, id):
        """Register 'id', capturing it if it is the first one seen."""
        Configurations.register(self,id)
        if not self.first_:
            self.first_ = id

    def first(self):
        """Return the first registered configuration id (None if none)."""
        return self.first_
+
+
# List of all registered configurations.
__versions = MSVCConfigurations()

# Supported CPU architectures, expressed as architecture/address-model
# property combinations.
__cpu_arch_i386 = [
    '<architecture>/<address-model>',
    '<architecture>/<address-model>32',
    '<architecture>x86/<address-model>',
    '<architecture>x86/<address-model>32']

__cpu_arch_amd64 = [
    '<architecture>/<address-model>64',
    '<architecture>x86/<address-model>64']

__cpu_arch_ia64 = [
    '<architecture>ia64/<address-model>',
    '<architecture>ia64/<address-model>64']


# Supported CPU types (only Itanium optimization options are supported from
# VC++ 2005 on). See
# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
# detailed information.
__cpu_type_g5 = ['i586', 'pentium', 'pentium-mmx' ]
__cpu_type_g6 = ['i686', 'pentiumpro', 'pentium2', 'pentium3', 'pentium3m', 'pentium-m', 'k6',
                 'k6-2', 'k6-3', 'winchip-c6', 'winchip2', 'c3', 'c3-2' ]
# NOTE(review): 'mermon'/'yorksfield' look like typos for 'merom'/'yorkfield';
# left unchanged because these must stay in sync with the <instruction-set>
# feature values declared elsewhere -- confirm before renaming.
__cpu_type_em64t = ['prescott', 'nocona', 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'mermon',
                    'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
                    'yorksfield', 'nehalem' ]
__cpu_type_amd64 = ['k8', 'opteron', 'athlon64', 'athlon-fx']
# A previously missing comma after 'athlon-xp' implicitly concatenated it with
# 'athlon-mp' into a single bogus 'athlon-xpathlon-mp' entry; they are two
# distinct CPU types.
__cpu_type_g7 = ['pentium4', 'pentium4m', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
                 'athlon-mp'] + __cpu_type_em64t + __cpu_type_amd64
__cpu_type_itanium = ['itanium', 'itanium1', 'merced']
__cpu_type_itanium2 = ['itanium2', 'mckinley']


# Known toolset versions, in order of preference.
_known_versions = ['11.0', '10.0', '10.0express', '9.0', '9.0express', '8.0', '8.0express', '7.1', '7.1toolkit', '7.0', '6.0']

# Version aliases.
__version_alias_6 = '6.0'
__version_alias_6_5 = '6.0'
__version_alias_7 = '7.0'
__version_alias_8 = '8.0'
__version_alias_9 = '9.0'
__version_alias_10 = '10.0'
__version_alias_11 = '11.0'

# Names of registry keys containing the Visual C++ installation path (relative
# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
__version_6_0_reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++"
__version_7_0_reg = "VisualStudio\\7.0\\Setup\\VC"
__version_7_1_reg = "VisualStudio\\7.1\\Setup\\VC"
__version_8_0_reg = "VisualStudio\\8.0\\Setup\\VC"
__version_8_0express_reg = "VCExpress\\8.0\\Setup\\VC"
__version_9_0_reg = "VisualStudio\\9.0\\Setup\\VC"
__version_9_0express_reg = "VCExpress\\9.0\\Setup\\VC"
__version_10_0_reg = "VisualStudio\\10.0\\Setup\\VC"
__version_10_0express_reg = "VCExpress\\10.0\\Setup\\VC"
__version_11_0_reg = "VisualStudio\\11.0\\Setup\\VC"

# Visual C++ Toolkit 2003 does not store its installation path in the registry.
# The environment variable 'VCToolkitInstallDir' and the default installation
# path will be checked instead.
__version_7_1toolkit_path = 'Microsoft Visual C++ Toolkit 2003\\bin'
__version_7_1toolkit_env = 'VCToolkitInstallDir'

# Path to the folder containing "cl.exe" relative to the value of the
# corresponding environment variable.
__version_7_1toolkit_envpath = 'bin'

# Auto-detect all the available msvc installations on the system.
auto_detect_toolset_versions()

# And finally trigger the actual Boost Build toolset registration.
register_toolset()
diff --git a/tools/build/v2/tools/pch.py b/tools/build/v2/tools/pch.py
index 21d3db09df..71cb7166eb 100644
--- a/tools/build/v2/tools/pch.py
+++ b/tools/build/v2/tools/pch.py
@@ -29,6 +29,7 @@
# ;
from b2.build import type, feature, generators
+from b2.tools import builtin
type.register('PCH', ['pch'])
type.register('C_PCH', [], 'PCH')
@@ -48,7 +49,7 @@ class PchGenerator(generators.Generator):
from being run unless it's being used for a top-level PCH target.
"""
def action_class(self):
- return 'compile-action'
+ return builtin.CompileAction
def run(self, project, name, prop_set, sources):
if not name:
@@ -65,7 +66,7 @@ class PchGenerator(generators.Generator):
pass
else:
r = self.run_pch(project, name,
- prop_set.add_raw('<define>BOOST_BUILD_PCH_ENABLED'),
+ prop_set.add_raw(['<define>BOOST_BUILD_PCH_ENABLED']),
sources)
return generators.add_usage_requirements(
r, ['<define>BOOST_BUILD_PCH_ENABLED'])
@@ -74,10 +75,9 @@ class PchGenerator(generators.Generator):
def run_pch(self, project, name, prop_set, sources):
pass
-#FIXME: dummy-generator in builtins.jam needs to be ported.
# NOTE: requirements are empty, default pch generator can be applied when
# pch=off.
-###generators.register(
-### [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
-###generators.register
-### [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
+generators.register(builtin.DummyGenerator(
+ "pch.default-c-pch-generator", False, [], ['C_PCH'], []))
+generators.register(builtin.DummyGenerator(
+ "pch.default-cpp-pch-generator", False, [], ['CPP_PCH'], []))
diff --git a/tools/build/v2/tools/python.jam b/tools/build/v2/tools/python.jam
index 66f2aabec3..6c2073b788 100644
--- a/tools/build/v2/tools/python.jam
+++ b/tools/build/v2/tools/python.jam
@@ -1178,7 +1178,7 @@ class python-test-generator : generator
property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
- result = [ construct-result $(python) $(extensions) $(new-sources) :
+ return [ construct-result $(python) $(extensions) $(new-sources) :
$(project) $(name) : $(property-set) ] ;
}
}
diff --git a/tools/build/v2/tools/stage.jam b/tools/build/v2/tools/stage.jam
index 296e7558e4..36427447bf 100644
--- a/tools/build/v2/tools/stage.jam
+++ b/tools/build/v2/tools/stage.jam
@@ -263,7 +263,7 @@ class install-target-class : basic-target
}
}
DELETE_MODULE $(result) ;
- result = [ sequence.unique $(result2) ] ;
+ return [ sequence.unique $(result2) ] ;
}
# Returns true iff 'type' is subtype of some element of 'types-to-include'.
diff --git a/tools/build/v2/tools/stage.py b/tools/build/v2/tools/stage.py
index 25eccbe513..90d3c0f976 100644
--- a/tools/build/v2/tools/stage.py
+++ b/tools/build/v2/tools/stage.py
@@ -224,7 +224,7 @@ def symlink(name, project, source, ps):
return virtual_target.FileTarget(name, source.type(), project, a, exact=True)
def relink_file(project, source, ps):
- action = source.action()
+ action = source[0].action()
cloned_action = virtual_target.clone_action(action, project, "", ps)
targets = cloned_action.targets()
# We relink only on Unix, where exe or shared lib is always a single file.
diff --git a/tools/build/v2/tools/types/__init__.py b/tools/build/v2/tools/types/__init__.py
index f972b71495..9ee31d13a3 100644
--- a/tools/build/v2/tools/types/__init__.py
+++ b/tools/build/v2/tools/types/__init__.py
@@ -5,6 +5,7 @@ __all__ = [
'html',
'lib',
'obj',
+ 'preprocessed',
'rsp',
]
diff --git a/tools/build/v2/tools/types/cpp.py b/tools/build/v2/tools/types/cpp.py
index 7b56111c85..a6703255c6 100644
--- a/tools/build/v2/tools/types/cpp.py
+++ b/tools/build/v2/tools/types/cpp.py
@@ -5,6 +5,9 @@
from b2.build import type
def register ():
- type.register_type ('CPP', ['cpp', 'cxx', 'cc'])
+ type.register_type('CPP', ['cpp', 'cxx', 'cc'])
+ type.register_type('H', ['h'])
+ type.register_type('HPP', ['hpp'], 'H')
+ type.register_type('C', ['c'])
register ()
diff --git a/tools/build/v2/tools/types/preprocessed.py b/tools/build/v2/tools/types/preprocessed.py
new file mode 100644
index 0000000000..f591043347
--- /dev/null
+++ b/tools/build/v2/tools/types/preprocessed.py
@@ -0,0 +1,11 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
def register ():
    # Register the preprocessor-output file types. PREPROCESSED_C ('.i') and
    # PREPROCESSED_CPP ('.ii') derive from C and CPP respectively, so the
    # existing C/C++ generators can consume them.
    type.register_type('PREPROCESSED_C', ['i'], 'C')
    type.register_type('PREPROCESSED_CPP', ['ii'], 'CPP')

register ()
diff --git a/tools/build/v2/tools/unix.py b/tools/build/v2/tools/unix.py
index d409c2e460..34758f57b5 100644
--- a/tools/build/v2/tools/unix.py
+++ b/tools/build/v2/tools/unix.py
@@ -58,8 +58,8 @@ class UnixSearchedLibGenerator (builtin.SearchedLibGenerator):
def optional_properties (self):
return self.requirements ()
- def run (self, project, name, prop_set, sources, multiple):
- result = SearchedLibGenerator.run (project, name, prop_set, sources, multiple)
+ def run (self, project, name, prop_set, sources):
+ result = SearchedLibGenerator.run (project, name, prop_set, sources)
set_library_order (sources, prop_set, result)
@@ -69,10 +69,10 @@ class UnixPrebuiltLibGenerator (generators.Generator):
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
- def run (self, project, name, prop_set, sources, multiple):
+ def run (self, project, name, prop_set, sources):
f = prop_set.get ('<file>')
set_library_order_aux (f, sources)
- return (f, sources)
+ return f + sources
### # The derived toolset must specify their own rules and actions.
# FIXME: restore?