summaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
authorDongHun Kwak <dh0128.kwak@samsung.com>2016-10-06 10:33:54 +0900
committerDongHun Kwak <dh0128.kwak@samsung.com>2016-10-06 10:36:09 +0900
commitd9ec475d945d3035377a0d89ed42e382d8988891 (patch)
tree34aff2cee4b209906243ab5499d61f3edee2982f /tools
parent71d216b90256936a9638f325af9bc69d720e75de (diff)
downloadboost-d9ec475d945d3035377a0d89ed42e382d8988891.tar.gz
boost-d9ec475d945d3035377a0d89ed42e382d8988891.tar.bz2
boost-d9ec475d945d3035377a0d89ed42e382d8988891.zip
Imported Upstream version 1.60.0
Change-Id: Ie709530d6d5841088ceaba025cbe175a4ef43050 Signed-off-by: DongHun Kwak <dh0128.kwak@samsung.com>
Diffstat (limited to 'tools')
-rw-r--r--tools/build/doc/bjam.qbk3
-rw-r--r--tools/build/doc/jamfile.jam2
-rw-r--r--tools/build/doc/src/install.xml4
-rw-r--r--tools/build/example/build-id/Jamroot.jam0
-rw-r--r--tools/build/example/customization/inline_file.py8
-rw-r--r--tools/build/example/customization/verbatim.py6
-rw-r--r--tools/build/src/build-system.jam43
-rwxr-xr-xtools/build/src/build/alias.py26
-rw-r--r--tools/build/src/build/build-request.jam63
-rw-r--r--tools/build/src/build/build_request.py76
-rw-r--r--tools/build/src/build/configure.py30
-rw-r--r--tools/build/src/build/engine.py102
-rw-r--r--tools/build/src/build/errors.py22
-rw-r--r--tools/build/src/build/feature.py329
-rw-r--r--tools/build/src/build/generators.py385
-rw-r--r--tools/build/src/build/project.py146
-rw-r--r--tools/build/src/build/property.py200
-rw-r--r--tools/build/src/build/property_set.py24
-rw-r--r--tools/build/src/build/scanner.py47
-rw-r--r--tools/build/src/build/targets.py505
-rw-r--r--tools/build/src/build/toolset.py175
-rw-r--r--tools/build/src/build/type.py102
-rw-r--r--tools/build/src/build/virtual_target.py83
-rw-r--r--tools/build/src/build_system.py25
-rw-r--r--tools/build/src/contrib/boost.jam1
-rw-r--r--tools/build/src/contrib/boost.py31
-rw-r--r--tools/build/src/engine/build.bat2
-rw-r--r--tools/build/src/engine/build.jam6
-rw-r--r--tools/build/src/engine/builtins.c114
-rw-r--r--tools/build/src/engine/jam.c18
-rw-r--r--tools/build/src/kernel/bootstrap.py4
-rw-r--r--tools/build/src/manager.py16
-rw-r--r--tools/build/src/tools/builtin.py192
-rw-r--r--tools/build/src/tools/cast.py23
-rw-r--r--tools/build/src/tools/common.py149
-rw-r--r--tools/build/src/tools/darwin.py4
-rw-r--r--tools/build/src/tools/doxproc.py118
-rw-r--r--tools/build/src/tools/gcc.py6
-rw-r--r--tools/build/src/tools/intel-win.jam1
-rw-r--r--tools/build/src/tools/make.py22
-rw-r--r--tools/build/src/tools/mc.py2
-rw-r--r--tools/build/src/tools/message.py6
-rw-r--r--tools/build/src/tools/midl.py18
-rw-r--r--tools/build/src/tools/msvc.jam9
-rw-r--r--tools/build/src/tools/msvc.py62
-rw-r--r--tools/build/src/tools/package.py14
-rw-r--r--tools/build/src/tools/rc.py26
-rw-r--r--tools/build/src/tools/stage.py26
-rw-r--r--tools/build/src/tools/symlink.py22
-rw-r--r--tools/build/src/tools/testing.py28
-rw-r--r--tools/build/src/tools/unix.py46
-rw-r--r--tools/build/src/util/__init__.py155
-rw-r--r--tools/build/src/util/indirect.py2
-rw-r--r--tools/build/src/util/logger.py12
-rw-r--r--tools/build/src/util/option.py4
-rw-r--r--tools/build/src/util/order.py26
-rw-r--r--tools/build/src/util/path.py251
-rw-r--r--tools/build/src/util/sequence.py8
-rw-r--r--tools/build/src/util/set.py10
-rw-r--r--tools/build/src/util/utility.py20
-rw-r--r--tools/build/test/alias.py2
-rw-r--r--tools/build/test/cli_property_expansion.py41
-rw-r--r--tools/build/test/composite.py6
-rwxr-xr-xtools/build/test/core_bindrule.py6
-rw-r--r--tools/build/test/core_dependencies.py20
-rwxr-xr-xtools/build/test/core_language.py6
-rw-r--r--tools/build/test/custom_generator.py18
-rw-r--r--tools/build/test/ordered_include.py12
-rw-r--r--tools/build/test/print.py8
-rwxr-xr-xtools/build/test/qt4.py4
-rwxr-xr-xtools/build/test/qt5.py4
-rw-r--r--tools/build/test/relative_sources.py2
-rw-r--r--tools/build/test/test1.py6
-rwxr-xr-xtools/build/test/testing_support.py2
74 files changed, 2495 insertions, 1502 deletions
diff --git a/tools/build/doc/bjam.qbk b/tools/build/doc/bjam.qbk
index d54839182c..f17e74f8f5 100644
--- a/tools/build/doc/bjam.qbk
+++ b/tools/build/doc/bjam.qbk
@@ -71,7 +71,7 @@ cd /jam source location/
sh ./build.sh
]
-For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/v2/engine=.
+For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/src/engine=.
If the scripts fail to detect an appropriate toolset to build with your particular toolset may not be auto-detectable. In that case, you can specify the toolset as the first argument, this assumes that the toolset is readily available in the =PATH=.
@@ -841,6 +841,7 @@ rule SHELL ( /command/ : * )
[variablelist
[[=exit-status=] [In addition to the output the result status of the executed command is returned as a second element of the result.]]
[[=no-output=] [Don't capture the output of the command. Instead an empty ("") string value is returned in place of the output.]]
+ [[=strip-eol=] [Remove trailing end-of-line character from output, if any.]]
]
Because the Perforce/Jambase defines a =SHELL= rule which hides the
diff --git a/tools/build/doc/jamfile.jam b/tools/build/doc/jamfile.jam
index 22d67e62ae..e61017d651 100644
--- a/tools/build/doc/jamfile.jam
+++ b/tools/build/doc/jamfile.jam
@@ -5,7 +5,7 @@
import quickbook ;
using boostbook ;
-project tools/build/v2/doc
+project tools/build/doc
;
boostbook userman : src/standalone.xml
diff --git a/tools/build/doc/src/install.xml b/tools/build/doc/src/install.xml
index 40c8b5ccbd..8a272c73dd 100644
--- a/tools/build/doc/src/install.xml
+++ b/tools/build/doc/src/install.xml
@@ -45,7 +45,7 @@
<para>If you are not using a Boost.Build package, but rather the version
bundled with the Boost C++ Libraries, the above commands should be run
- in the <filename>tools/build/v2</filename> directory.</para>
+ in the <filename>tools/build</filename> directory.</para>
<para>
Now that Boost.Build is installed, you can try some of the examples. Copy
@@ -97,7 +97,7 @@
Boost.Build release package, except for
<filename>jam_src</filename> directory. If you're using Boost CVS
to obtain Boost.Build, as opposed to release package, take
- everything from the <filename>tools/build/v2</filename> directory.
+ everything from the <filename>tools/build</filename> directory.
For a check, make sure that
<filename>/usr/share/boost-build/boost-build.jam</filename> is installed.
</para>
diff --git a/tools/build/example/build-id/Jamroot.jam b/tools/build/example/build-id/Jamroot.jam
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/tools/build/example/build-id/Jamroot.jam
diff --git a/tools/build/example/customization/inline_file.py b/tools/build/example/customization/inline_file.py
index a48c5fc9d9..9f13acd874 100644
--- a/tools/build/example/customization/inline_file.py
+++ b/tools/build/example/customization/inline_file.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import sys
from string import strip
@@ -38,7 +38,7 @@ else:
file_to_include = sys.argv[2]
in_file = open(file_to_include, "r");
- variable_name = strip(in_file.readline())
+ variable_name = strip(in_file.readline())
out_file.write("extern const char %s[] = {\n%s};\n\n" % (variable_name, quote_file(in_file)))
in_file.close()
out_file.close()
diff --git a/tools/build/example/customization/verbatim.py b/tools/build/example/customization/verbatim.py
index be285976c1..089bd38316 100644
--- a/tools/build/example/customization/verbatim.py
+++ b/tools/build/example/customization/verbatim.py
@@ -1,6 +1,6 @@
-# Copyright 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This file is only used with Python port of Boost.Build
diff --git a/tools/build/src/build-system.jam b/tools/build/src/build-system.jam
index 247326a96f..7b783336e7 100644
--- a/tools/build/src/build-system.jam
+++ b/tools/build/src/build-system.jam
@@ -585,26 +585,6 @@ local rule should-clean-project ( project )
local properties = [ $(build-request).get-at 2 ] ;
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if $(properties)
- {
- expanded = [ build-request.expand-no-defaults $(properties) ] ;
- local xexpanded ;
- for local e in $(expanded)
- {
- xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
- }
- expanded = $(xexpanded) ;
- }
- else
- {
- expanded = [ property-set.empty ] ;
- }
-
-
# Check that we actually found something to build.
if ! $(current-project) && ! $(target-ids)
{
@@ -695,6 +675,29 @@ local rule should-clean-project ( project )
configure.set-log-file $(first-build-build-dir)/config.log ;
config-cache.load $(first-build-build-dir)/project-cache.jam ;
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ # The expansion is being performed as late as possible so that the feature
+ # validation is performed after all necessary modules (including project targets
+ # on the command line) have been loaded.
+ if $(properties)
+ {
+ expanded += [ build-request.convert-command-line-elements $(properties) ] ;
+ expanded = [ build-request.expand-no-defaults $(expanded) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
+ }
+ else
+ {
+ expanded = [ property-set.empty ] ;
+ }
+
# Now that we have a set of targets to build and a set of property sets to
# build the targets with, we can start the main build process by using each
# property set to generate virtual targets from all of our listed targets
diff --git a/tools/build/src/build/alias.py b/tools/build/src/build/alias.py
index 575e53609d..e9078c746c 100755
--- a/tools/build/src/build/alias.py
+++ b/tools/build/src/build/alias.py
@@ -1,13 +1,13 @@
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Status: ported (danielw)
# Base revision: 56043
# This module defines the 'alias' rule and associated class.
#
-# Alias is just a main target which returns its source targets without any
+# Alias is just a main target which returns its source targets without any
# processing. For example::
#
# alias bin : hello test_hello ;
@@ -18,7 +18,7 @@
# alias platform-src : win.cpp : <os>NT ;
# alias platform-src : linux.cpp : <os>LINUX ;
# exe main : main.cpp platform-src ;
-#
+#
# Lastly, it's possible to create local alias for some target, with different
# properties::
#
@@ -29,7 +29,7 @@ import targets
import property_set
from b2.manager import get_manager
-from b2.util import metatarget
+from b2.util import metatarget, is_iterable_typed
class AliasTarget(targets.BasicTarget):
@@ -37,9 +37,17 @@ class AliasTarget(targets.BasicTarget):
targets.BasicTarget.__init__(self, *args)
def construct(self, name, source_targets, properties):
+ if __debug__:
+ from .virtual_target import VirtualTarget
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, VirtualTarget)
+ assert isinstance(properties, property_set.PropertySet)
return [property_set.empty(), source_targets]
def compute_usage_requirements(self, subvariant):
+ if __debug__:
+ from .virtual_target import Subvariant
+ assert isinstance(subvariant, Subvariant)
base = targets.BasicTarget.compute_usage_requirements(self, subvariant)
# Add source's usage requirement. If we don't do this, "alias" does not
# look like 100% alias.
@@ -47,7 +55,11 @@ class AliasTarget(targets.BasicTarget):
@metatarget
def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
-
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
project = get_manager().projects().current()
targets = get_manager().targets()
diff --git a/tools/build/src/build/build-request.jam b/tools/build/src/build/build-request.jam
index 2a1bbb467c..3110713b70 100644
--- a/tools/build/src/build/build-request.jam
+++ b/tools/build/src/build/build-request.jam
@@ -150,8 +150,7 @@ rule from-command-line ( command-line * )
if [ MATCH "(.*=.*)" : $(e) ]
|| [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
{
- properties += [ convert-command-line-element $(e) :
- $(feature-space) ] ;
+ properties += $(e) ;
}
else if $(e)
{
@@ -169,9 +168,22 @@ rule from-command-line ( command-line * )
}
-# Converts one element of command line build request specification into internal
+# Converts a list of elements of command line build request specification into internal
# form. Expects all the project files to already be loaded.
#
+rule convert-command-line-elements ( elements * )
+{
+ local result ;
+ for local e in $(elements)
+ {
+ result += [ convert-command-line-element $(e) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Converts one element of command line build request specification into internal
+# form.
local rule convert-command-line-element ( e )
{
local result ;
@@ -286,37 +298,60 @@ rule __test__ ( )
local r ;
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-
try ;
{
- build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+ r = [ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ] ;
+ build-request.convert-command-line-elements [ $(r).get-at 2 ] ;
}
catch \"static\" is not an implicit feature value ;
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic ]
+ : debug <runtime-link>dynamic ;
+
r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : target ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic ]
+ : debug <runtime-link>dynamic ;
r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic,static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic,static ]
+ : debug <runtime-link>dynamic <runtime-link>static ;
r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
- gcc/<runtime-link>static ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/runtime-link=dynamic,static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug gcc/runtime-link=dynamic,static ]
+ : debug gcc/<runtime-link>dynamic gcc/<runtime-link>static ;
r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
- borland/<runtime-link>static ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc,borland/runtime-link=static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements msvc gcc,borland/runtime-link=static ]
+ : msvc gcc/<runtime-link>static borland/<runtime-link>static ;
r = [ build-request.from-command-line bjam gcc-3.0 ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+ assert.equal
+ [ build-request.convert-command-line-elements gcc-3.0 ]
+ : gcc-3.0 ;
+
feature.finish-test build-request-test-temp ;
}
diff --git a/tools/build/src/build/build_request.py b/tools/build/src/build/build_request.py
index 118033e1e1..1942516887 100644
--- a/tools/build/src/build/build_request.py
+++ b/tools/build/src/build/build_request.py
@@ -11,18 +11,20 @@ import b2.build.feature
feature = b2.build.feature
from b2.util.utility import *
+from b2.util import is_iterable_typed
import b2.build.property_set as property_set
def expand_no_defaults (property_sets):
""" Expand the given build request by combining all property_sets which don't
specify conflicting non-free features.
"""
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
# First make all features and subfeatures explicit
expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
-
+
# Now combine all of the expanded property_sets
product = __x_product (expanded_property_sets)
-
+
return [property_set.create(p) for p in product]
@@ -30,6 +32,7 @@ def __x_product (property_sets):
""" Return the cross-product of all elements of property_sets, less any
that would contain conflicting values for single-valued features.
"""
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
x_product_seen = set()
return __x_product_aux (property_sets, x_product_seen)[0]
@@ -42,8 +45,10 @@ def __x_product_aux (property_sets, seen_features):
Returns a tuple of:
- list of lists of Property instances, such that within each list, no two Property instance
have the same feature, and no Property is for feature in seen_features.
- - set of features we saw in property_sets
+ - set of features we saw in property_sets
"""
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
+ assert isinstance(seen_features, set)
if not property_sets:
return ([], set())
@@ -76,7 +81,7 @@ def __x_product_aux (property_sets, seen_features):
result.append(properties + inner)
else:
result.append(properties)
-
+
if inner_seen & these_features:
# Some of elements in property_sets[1:] conflict with elements of property_sets[0],
# Try again, this time omitting elements of property_sets[0]
@@ -85,11 +90,12 @@ def __x_product_aux (property_sets, seen_features):
return (result, inner_seen | these_features)
-
+
def looks_like_implicit_value(v):
"""Returns true if 'v' is either implicit value, or
the part before the first '-' symbol is implicit value."""
+ assert isinstance(v, basestring)
if feature.is_implicit_value(v):
return 1
else:
@@ -104,7 +110,7 @@ def from_command_line(command_line):
and constructs build request from it. Returns a list of two
lists. First is the set of targets specified in the command line,
and second is the set of requested build properties."""
-
+ assert is_iterable_typed(command_line, basestring)
targets = []
properties = []
@@ -112,17 +118,17 @@ def from_command_line(command_line):
if e[:1] != "-":
# Build request spec either has "=" in it, or completely
# consists of implicit feature values.
- if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
- properties += convert_command_line_element(e)
+ if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
+ properties.append(e)
elif e:
targets.append(e)
return [targets, properties]
-
+
# Converts one element of command line build request specification into
# internal form.
def convert_command_line_element(e):
-
+ assert isinstance(e, basestring)
result = None
parts = e.split("/")
for p in parts:
@@ -133,7 +139,7 @@ def convert_command_line_element(e):
lresult = [("<%s>%s" % (feature, v)) for v in values]
else:
lresult = p.split(",")
-
+
if p.find('-') == -1:
# FIXME: first port property.validate
# property.validate cannot handle subfeatures,
@@ -149,68 +155,68 @@ def convert_command_line_element(e):
return [property_set.create(b2.build.feature.split(r)) for r in result]
-###
+###
### rule __test__ ( )
### {
### import assert feature ;
-###
+###
### feature.prepare-test build-request-test-temp ;
-###
+###
### import build-request ;
### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
### import errors : try catch ;
### import feature : feature subfeature ;
-###
+###
### feature toolset : gcc msvc borland : implicit ;
### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
### 3.0 3.0.1 3.0.2 : optional ;
-###
+###
### feature variant : debug release : implicit composite ;
### feature inlining : on off ;
### feature "include" : : free ;
-###
+###
### feature stdlib : native stlport : implicit ;
-###
+###
### feature runtime-link : dynamic static : symmetric ;
-###
-###
+###
+###
### local r ;
-###
-### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+###
+### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-###
+###
### try ;
### {
-###
+###
### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
### }
### catch \"static\" is not a value of an implicit feature ;
-###
-###
+###
+###
### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : target ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-###
+###
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
-###
+###
### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
### gcc/<runtime-link>static ;
-###
+###
### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
### borland/<runtime-link>static ;
-###
+###
### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
-###
+###
### feature.finish-test build-request-test-temp ;
### }
-###
-###
+###
+###
diff --git a/tools/build/src/build/configure.py b/tools/build/src/build/configure.py
index 0426832c40..10afb82098 100644
--- a/tools/build/src/build/configure.py
+++ b/tools/build/src/build/configure.py
@@ -10,17 +10,17 @@
# This module defines function to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting
-# build process.
+# build process.
# - Reporting what is built, and how it is configured.
import b2.build.property as property
import b2.build.property_set as property_set
-import b2.build.targets
+from b2.build import targets as targets_
from b2.manager import get_manager
from b2.util.sequence import unique
-from b2.util import bjam_signature, value_to_jam
+from b2.util import bjam_signature, value_to_jam, is_iterable
import bjam
import os
@@ -41,17 +41,22 @@ __log_fd = -1
def register_components(components):
"""Declare that the components specified by the parameter exist."""
+ assert is_iterable(components)
__components.extend(components)
-
+
def components_building(components):
"""Declare that the components specified by the parameters will be build."""
+ assert is_iterable(components)
__built_components.extend(components)
def log_component_configuration(component, message):
"""Report something about component configuration that the user should better know."""
+ assert isinstance(component, basestring)
+ assert isinstance(message, basestring)
__component_logs.setdefault(component, []).append(message)
def log_check_result(result):
+ assert isinstance(result, basestring)
global __announced_checks
if not __announced_checks:
print "Performing configuration checks"
@@ -60,7 +65,9 @@ def log_check_result(result):
print result
def log_library_search_result(library, result):
- log_check_result((" - %(library)s : %(result)s" % locals()).rjust(width))
+ assert isinstance(library, basestring)
+ assert isinstance(result, basestring)
+ log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
def print_component_configuration():
@@ -84,6 +91,10 @@ def builds(metatarget_reference, project, ps, what):
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns non-empty value if build is OK.
+ assert isinstance(metatarget_reference, basestring)
+ assert isinstance(project, targets_.ProjectTarget)
+ assert isinstance(ps, property_set.PropertySet)
+ assert isinstance(what, basestring)
result = []
@@ -93,12 +104,12 @@ def builds(metatarget_reference, project, ps, what):
result = False
__builds_cache[(what, ps)] = False
- targets = b2.build.targets.generate_from_reference(
+ targets = targets_.generate_from_reference(
metatarget_reference, project, ps).targets()
jam_targets = []
for t in targets:
jam_targets.append(t.actualize())
-
+
x = (" - %s" % what).rjust(__width)
if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
__builds_cache[(what, ps)] = True
@@ -112,6 +123,7 @@ def builds(metatarget_reference, project, ps, what):
return existing
def set_log_file(log_file_name):
+ assert isinstance(log_file_name, basestring)
# Called by Boost.Build startup code to specify name of a file
# that will receive results of configure checks. This
# should never be called by users.
@@ -134,7 +146,7 @@ class CheckTargetBuildsWorker:
self.false_properties = property.create_from_strings(false_properties, True)
def check(self, ps):
-
+ assert isinstance(ps, property_set.PropertySet)
# FIXME: this should not be hardcoded. Other checks might
# want to consider different set of features as relevant.
toolset = ps.get('toolset')[0]
@@ -146,7 +158,7 @@ class CheckTargetBuildsWorker:
ps.get_properties("architecture")
rps = property_set.create(relevant)
t = get_manager().targets().current()
- p = t.project()
+ p = t.project()
if builds(self.target, p, rps, "%s builds" % self.target):
choosen = self.true_properties
else:
diff --git a/tools/build/src/build/engine.py b/tools/build/src/build/engine.py
index 35333eaa00..4c2c97eaf3 100644
--- a/tools/build/src/build/engine.py
+++ b/tools/build/src/build/engine.py
@@ -10,46 +10,50 @@ import operator
import re
import b2.build.property_set as property_set
-import b2.util
-class BjamAction:
+from b2.util import set_jam_action, is_iterable
+
+class BjamAction(object):
"""Class representing bjam action defined from Python."""
-
+
def __init__(self, action_name, function):
+ assert isinstance(action_name, basestring)
+ assert callable(function) or function is None
self.action_name = action_name
self.function = function
-
- def __call__(self, targets, sources, property_set):
+ def __call__(self, targets, sources, property_set_):
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
# Bjam actions defined from Python have only the command
# to execute, and no associated jam procedural code. So
# passing 'property_set' to it is not necessary.
bjam_interface.call("set-update-action", self.action_name,
targets, sources, [])
if self.function:
- self.function(targets, sources, property_set)
+ self.function(targets, sources, property_set_)
-class BjamNativeAction:
+class BjamNativeAction(BjamAction):
"""Class representing bjam action defined by Jam code.
We still allow to associate a Python callable that will
be called when this action is installed on any target.
"""
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
+
+ def __call__(self, targets, sources, property_set_):
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
if self.function:
- self.function(targets, sources, property_set)
-
+ self.function(targets, sources, property_set_)
+
p = []
if property_set:
- p = property_set.raw()
+ p = property_set_.raw()
+
+ set_jam_action(self.action_name, targets, sources, p)
- b2.util.set_jam_action(self.action_name, targets, sources, p)
-
action_modifiers = {"updated": 0x01,
"together": 0x02,
"ignore": 0x04,
@@ -77,6 +81,8 @@ class Engine:
targets = [targets]
if isinstance (sources, str):
sources = [sources]
+ assert is_iterable(targets)
+ assert is_iterable(sources)
for target in targets:
for source in sources:
@@ -105,6 +111,11 @@ class Engine:
echo [ on $(targets) return $(MY-VAR) ] ;
"Hello World"
"""
+ if isinstance(targets, str):
+ targets = [targets]
+ assert is_iterable(targets)
+ assert isinstance(variable, basestring)
+
return bjam_interface.call('get-target-variable', targets, variable)
def set_target_variable (self, targets, variable, value, append=0):
@@ -114,13 +125,19 @@ class Engine:
where to generate targets, and will also be available to
updating rule for that 'taret'.
"""
- if isinstance (targets, str):
+ if isinstance (targets, str):
targets = [targets]
+ if isinstance(value, str):
+ value = [value]
+
+ assert is_iterable(targets)
+ assert isinstance(variable, basestring)
+ assert is_iterable(value)
for target in targets:
self.do_set_target_variable (target, variable, value, append)
- def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
+ def set_update_action (self, action_name, targets, sources, properties=None):
""" Binds a target to the corresponding update action.
If target needs to be updated, the action registered
with action_name will be used.
@@ -128,9 +145,17 @@ class Engine:
either 'register_action' or 'register_bjam_action'
method.
"""
- assert(isinstance(properties, property_set.PropertySet))
- if isinstance (targets, str):
+ if isinstance(targets, str):
targets = [targets]
+ if isinstance(sources, str):
+ sources = [sources]
+ if properties is None:
+ properties = property_set.empty()
+ assert isinstance(action_name, basestring)
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert(isinstance(properties, property_set.PropertySet))
+
self.do_set_update_action (action_name, targets, sources, properties)
def register_action (self, action_name, command, bound_list = [], flags = [],
@@ -149,10 +174,11 @@ class Engine:
This function will be called by set_update_action, and can
set additional target variables.
"""
- if self.actions.has_key(action_name):
- raise "Bjam action %s is already defined" % action_name
-
- assert(isinstance(flags, list))
+ assert isinstance(action_name, basestring)
+ assert isinstance(command, basestring)
+ assert is_iterable(bound_list)
+ assert is_iterable(flags)
+ assert function is None or callable(function)
bjam_flags = reduce(operator.or_,
(action_modifiers[flag] for flag in flags), 0)
@@ -178,25 +204,37 @@ class Engine:
# action name. This way, jamfile rules that take action names
# can just register them without specially checking if
# action is already registered.
+ assert isinstance(action_name, basestring)
+ assert function is None or callable(function)
if not self.actions.has_key(action_name):
self.actions[action_name] = BjamNativeAction(action_name, function)
-
+
# Overridables
- def do_set_update_action (self, action_name, targets, sources, property_set):
+ def do_set_update_action (self, action_name, targets, sources, property_set_):
+ assert isinstance(action_name, basestring)
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
action = self.actions.get(action_name)
if not action:
raise Exception("No action %s was registered" % action_name)
- action(targets, sources, property_set)
+ action(targets, sources, property_set_)
def do_set_target_variable (self, target, variable, value, append):
+ assert isinstance(target, basestring)
+ assert isinstance(variable, basestring)
+ assert is_iterable(value)
+ assert isinstance(append, int) # matches bools
if append:
bjam_interface.call("set-target-variable", target, variable, value, "true")
else:
bjam_interface.call("set-target-variable", target, variable, value)
-
+
def do_add_dependency (self, target, source):
+ assert isinstance(target, basestring)
+ assert isinstance(source, basestring)
bjam_interface.call("DEPENDS", target, source)
-
-
+
+
diff --git a/tools/build/src/build/errors.py b/tools/build/src/build/errors.py
index d9dceefe08..69d8a37d30 100644
--- a/tools/build/src/build/errors.py
+++ b/tools/build/src/build/errors.py
@@ -1,8 +1,8 @@
# Status: being written afresh by Vladimir Prus
-# Copyright 2007 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This file is supposed to implement error reporting for Boost.Build.
# Experience with jam version has shown that printing full backtrace
@@ -22,7 +22,7 @@ import sys
def format(message, prefix=""):
parts = str(message).split("\n")
return "\n".join(prefix+p for p in parts)
-
+
class Context:
@@ -71,7 +71,7 @@ class ExceptionWithUserContext(Exception):
traceback.print_tb(self.original_tb_)
elif self.stack_:
for l in traceback.format_list(self.stack_):
- print l,
+ print l,
else:
print " use the '--stacktrace' option to get Python stacktrace"
print
@@ -87,9 +87,9 @@ def user_error_checkpoint(callable):
errors.handle_stray_exception(e)
finally:
errors.pop_user_context()
-
+
return wrapper
-
+
class Errors:
def __init__(self):
@@ -116,12 +116,12 @@ class Errors:
def handle_stray_exception(self, e):
raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
- e, sys.exc_info()[2])
+ e, sys.exc_info()[2])
def __call__(self, message):
self._count = self._count + 1
- raise ExceptionWithUserContext(message, self.contexts_[:],
+ raise ExceptionWithUserContext(message, self.contexts_[:],
stack=traceback.extract_stack())
-
-
+
+
diff --git a/tools/build/src/build/feature.py b/tools/build/src/build/feature.py
index 827dae3408..6cf81a1a1f 100644
--- a/tools/build/src/build/feature.py
+++ b/tools/build/src/build/feature.py
@@ -1,15 +1,15 @@
# Status: ported, except for unit tests.
# Base revision: 64488
#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
-from b2.util import utility, bjam_signature
+from b2.util import utility, bjam_signature, is_iterable_typed
import b2.util.set
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
from b2.exceptions import *
@@ -25,6 +25,9 @@ class Feature(object):
_attribute_name_to_integer = {}
def __init__(self, name, values, attributes):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(values, basestring)
+ assert is_iterable_typed(attributes, basestring)
self._name = name
self._values = values
self._default = None
@@ -42,12 +45,19 @@ class Feature(object):
return self._values
def add_values(self, values):
+ assert is_iterable_typed(values, basestring)
self._values.extend(values)
def attributes(self):
return self._attributes
def set_default(self, value):
+ assert isinstance(value, basestring)
+ for attr in ('free', 'optional'):
+ if getattr(self, attr)():
+ get_manager().errors()('"{}" feature "<{}>" cannot have a default value.'
+ .format(attr, self._name))
+
self._default = value
def default(self):
@@ -61,6 +71,7 @@ class Feature(object):
return self._subfeatures
def add_subfeature(self, name):
+ assert isinstance(name, Feature)
self._subfeatures.append(name)
def parent(self):
@@ -72,19 +83,21 @@ class Feature(object):
return self._parent
def set_parent(self, feature, value):
+ assert isinstance(feature, Feature)
+ assert isinstance(value, basestring)
self._parent = (feature, value)
def __str__(self):
return self._name
-
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __all_attributes, __all_features, __implicit_features, __composite_properties
global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features
global __all_subfeatures
-
+
# The list with all attribute names.
__all_attributes = [ 'implicit',
'composite',
@@ -107,28 +120,28 @@ def reset ():
return getattr(self, "_attributes") & flag
setattr(Feature, a.replace("-", "_"), probe)
i = i << 1
-
+
# A map containing all features. The key is the feature name.
# The value is an instance of Feature class.
__all_features = {}
-
+
# All non-subfeatures.
__all_top_features = []
-
+
# Maps valus to the corresponding implicit feature
__implicit_features = {}
-
+
# A map containing all composite properties. The key is a Property instance,
# and the value is a list of Property instances
__composite_properties = {}
-
+
__features_with_attributes = {}
for attribute in __all_attributes:
__features_with_attributes [attribute] = []
-
+
# Maps a value to the corresponding subfeature name.
__subfeature_from_value = {}
-
+
# All free features
__free_features = []
@@ -146,6 +159,7 @@ def get(name):
Throws if no feature by such name exists
"""
+ assert isinstance(name, basestring)
return __all_features[name]
# FIXME: prepare-test/finish-test?
@@ -163,12 +177,12 @@ def feature (name, values, attributes = []):
__all_features[name] = feature
# Temporary measure while we have not fully moved from 'gristed strings'
__all_features["<" + name + ">"] = feature
-
+
for attribute in attributes:
__features_with_attributes [attribute].append (name)
name = add_grist(name)
-
+
if 'subfeature' in attributes:
__all_subfeatures.append(name)
else:
@@ -208,9 +222,10 @@ def set_default (feature, value):
def defaults(features):
""" Returns the default property values for the given features.
"""
+ assert is_iterable_typed(features, Feature)
# FIXME: should merge feature and property modules.
- import property
-
+ from . import property
+
result = []
for f in features:
if not f.free() and not f.optional() and f.default():
@@ -221,21 +236,22 @@ def defaults(features):
def valid (names):
""" Returns true iff all elements of names are valid features.
"""
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return all([ valid_one (name) for name in names ])
+ if isinstance(names, str):
+ names = [names]
+ assert is_iterable_typed(names, basestring)
+
+ return all(name in __all_features for name in names)
def attributes (feature):
""" Returns the attributes of the given feature.
"""
+ assert isinstance(feature, basestring)
return __all_features[feature].attributes_string_list()
-
+
def values (feature):
""" Return the values of the given feature.
"""
+ assert isinstance(feature, basestring)
validate_feature (feature)
return __all_features[feature].values()
@@ -243,43 +259,43 @@ def is_implicit_value (value_string):
""" Returns true iff 'value_string' is a value_string
of an implicit feature.
"""
-
+ assert isinstance(value_string, basestring)
if __implicit_features.has_key(value_string):
return __implicit_features[value_string]
-
+
v = value_string.split('-')
if not __implicit_features.has_key(v[0]):
return False
feature = __implicit_features[v[0]]
-
+
for subvalue in (v[1:]):
if not __find_implied_subfeature(feature, subvalue, v[0]):
return False
-
+
return True
def implied_feature (implicit_value):
""" Returns the implicit feature associated with the given implicit value.
"""
+ assert isinstance(implicit_value, basestring)
components = implicit_value.split('-')
-
+
if not __implicit_features.has_key(components[0]):
raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
-
+
return __implicit_features[components[0]]
def __find_implied_subfeature (feature, subvalue, value_string):
-
- #if value_string == None: value_string = ''
+ assert isinstance(feature, Feature)
+ assert isinstance(subvalue, basestring)
+ assert isinstance(value_string, basestring)
- if not __subfeature_from_value.has_key(feature) \
- or not __subfeature_from_value[feature].has_key(value_string) \
- or not __subfeature_from_value[feature][value_string].has_key (subvalue):
+ try:
+ return __subfeature_from_value[feature][value_string][subvalue]
+ except KeyError:
return None
-
- return __subfeature_from_value[feature][value_string][subvalue]
# Given a feature and a value of one of its subfeatures, find the name
# of the subfeature. If value-string is supplied, looks for implied
@@ -289,6 +305,9 @@ def __find_implied_subfeature (feature, subvalue, value_string):
# value-string # The value of the main feature
def implied_subfeature (feature, subvalue, value_string):
+ assert isinstance(feature, Feature)
+ assert isinstance(subvalue, basestring)
+ assert isinstance(value_string, basestring)
result = __find_implied_subfeature (feature, subvalue, value_string)
if not result:
raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
@@ -298,28 +317,19 @@ def implied_subfeature (feature, subvalue, value_string):
def validate_feature (name):
""" Checks if all name is a valid feature. Otherwise, raises an exception.
"""
+ assert isinstance(name, basestring)
if not __all_features.has_key(name):
raise InvalidFeature ("'%s' is not a valid feature name" % name)
else:
return __all_features[name]
-def valid (names):
- """ Returns true iff all elements of names are valid features.
- """
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return [ valid_one (name) for name in names ]
-
# Uses Property
-def __expand_subfeatures_aux (property, dont_validate = False):
+def __expand_subfeatures_aux (property_, dont_validate = False):
""" Helper for expand_subfeatures.
Given a feature and value, or just a value corresponding to an
implicit feature, returns a property set consisting of all component
subfeatures and their values. For example:
-
+
expand_subfeatures <toolset>gcc-2.95.2-linux-x86
-> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
equivalent to:
@@ -329,33 +339,35 @@ def __expand_subfeatures_aux (property, dont_validate = False):
value: The value of the feature.
dont_validate: If True, no validation of value string will be done.
"""
- f = property.feature()
- v = property.value()
+ from . import property # no __debug__ since Property is used elsewhere
+ assert isinstance(property_, property.Property)
+ assert isinstance(dont_validate, int) # matches bools
+
+ f = property_.feature()
+ v = property_.value()
if not dont_validate:
validate_value_string(f, v)
components = v.split ("-")
-
+
v = components[0]
- import property
+ result = [property.Property(f, components[0])]
- result = [property.Property(f, components[0])]
-
subvalues = components[1:]
while len(subvalues) > 0:
subvalue = subvalues [0] # pop the head off of subvalues
subvalues = subvalues [1:]
-
+
subfeature = __find_implied_subfeature (f, subvalue, v)
-
+
# If no subfeature was found, reconstitute the value string and use that
if not subfeature:
return [property.Property(f, '-'.join(components))]
-
+
result.append(property.Property(subfeature, subvalue))
-
+
return result
def expand_subfeatures(properties, dont_validate = False):
@@ -363,11 +375,11 @@ def expand_subfeatures(properties, dont_validate = False):
Make all elements of properties corresponding to implicit features
explicit, and express all subfeature values as separate properties
in their own right. For example, the property
-
+
gcc-2.95.2-linux-x86
-
+
might expand to
-
+
<toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
properties: A sequence with elements of the form
@@ -375,6 +387,10 @@ def expand_subfeatures(properties, dont_validate = False):
case of implicit features.
: dont_validate: If True, no validation of value string will be done.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(dont_validate, int) # matches bools
result = []
for p in properties:
# Don't expand subfeatures in subfeatures
@@ -408,6 +424,8 @@ def expand_subfeatures(properties, dont_validate = False):
def extend (name, values):
""" Adds the given values to the given feature.
"""
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(values, basestring)
name = add_grist (name)
__validate_feature (name)
feature = __all_features [name]
@@ -419,7 +437,7 @@ def extend (name, values):
__implicit_features[v] = feature
- if len (feature.values()) == 0 and len (values) > 0:
+ if values and not feature.values() and not(feature.free() or feature.optional()):
# This is the first value specified for this feature,
# take it as default value
feature.set_default(values[0])
@@ -429,6 +447,8 @@ def extend (name, values):
def validate_value_string (f, value_string):
""" Checks that value-string is a valid value-string for the given feature.
"""
+ assert isinstance(f, Feature)
+ assert isinstance(value_string, basestring)
if f.free() or value_string in f.values():
return
@@ -453,39 +473,42 @@ def validate_value_string (f, value_string):
value-string is provided, the subvalues are only valid for the given
value of the feature. Thus, you could say that
<target-platform>mingw is specifc to <toolset>gcc-2.95.2 as follows:
-
+
extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
feature: The feature whose subfeature is being extended.
-
+
value-string: If supplied, specifies a specific value of the
main feature for which the new subfeature values
are valid.
-
+
subfeature: The name of the subfeature.
-
+
subvalues: The additional values of the subfeature being defined.
"""
def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
-
+ assert isinstance(feature_name, basestring)
+ assert isinstance(value_string, basestring)
+ assert isinstance(subfeature_name, basestring)
+ assert is_iterable_typed(subvalues, basestring)
feature = validate_feature(feature_name)
-
+
if value_string:
validate_value_string(feature, value_string)
subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
-
+
extend(subfeature_name, subvalues) ;
subfeature = __all_features[subfeature_name]
if value_string == None: value_string = ''
-
+
if not __subfeature_from_value.has_key(feature):
__subfeature_from_value [feature] = {}
-
+
if not __subfeature_from_value[feature].has_key(value_string):
__subfeature_from_value [feature][value_string] = {}
-
+
for subvalue in subvalues:
__subfeature_from_value [feature][value_string][subvalue] = subfeature
@@ -496,16 +519,16 @@ def subfeature (feature_name, value_string, subfeature, subvalues, attributes =
feature_name: Root feature that is not a subfeature.
value_string: An optional value-string specifying which feature or
subfeature values this subfeature is specific to,
- if any.
+ if any.
subfeature: The name of the subfeature being declared.
subvalues: The allowed values of this subfeature.
attributes: The attributes of the subfeature.
"""
parent_feature = validate_feature (feature_name)
-
+
# Add grist to the subfeature name if a value-string was supplied
subfeature_name = __get_subfeature_name (subfeature, value_string)
-
+
if subfeature_name in __all_features[feature_name].subfeatures():
message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
message += " specific to '%s'" % value_string
@@ -514,7 +537,7 @@ def subfeature (feature_name, value_string, subfeature, subvalues, attributes =
# First declare the subfeature as a feature in its own right
f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
f.set_parent(parent_feature, value_string)
-
+
parent_feature.add_subfeature(f)
# Now make sure the subfeature values are known.
@@ -527,7 +550,7 @@ def compose (composite_property_s, component_properties_s):
All parameters are <feature>value strings
"""
- import property
+ from . import property
component_properties_s = to_seq (component_properties_s)
composite_property = property.create_from_string(composite_property_s)
@@ -537,7 +560,7 @@ def compose (composite_property_s, component_properties_s):
component_properties = component_properties_s
else:
component_properties = [property.create_from_string(p) for p in component_properties_s]
-
+
if not f.composite():
raise BaseException ("'%s' is not a composite feature" % f)
@@ -550,10 +573,13 @@ def compose (composite_property_s, component_properties_s):
__composite_properties[composite_property] = component_properties
-def expand_composite(property):
- result = [ property ]
- if __composite_properties.has_key(property):
- for p in __composite_properties[property]:
+def expand_composite(property_):
+ if __debug__:
+ from .property import Property
+ assert isinstance(property_, Property)
+ result = [ property_ ]
+ if __composite_properties.has_key(property_):
+ for p in __composite_properties[property_]:
result.extend(expand_composite(p))
return result
@@ -567,7 +593,7 @@ def get_values (feature, properties):
for p in properties:
if get_grist (p) == feature:
result.append (replace_grist (p, ''))
-
+
return result
def free_features ():
@@ -579,6 +605,9 @@ def expand_composites (properties):
""" Expand all composite properties in the set so that all components
are explicitly expressed.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
explicit_features = set(p.feature() for p in properties)
result = []
@@ -604,7 +633,7 @@ def expand_composites (properties):
result.append (x)
elif any(r.feature() == f for r in result):
raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
- "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
+ "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
[r.value() for r in result if r.feature() == f], p, x.value()))
else:
result.append (x)
@@ -617,6 +646,11 @@ def is_subfeature_of (parent_property, f):
feature, or if f is a subfeature of the parent_property's feature
specific to the parent_property's value.
"""
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert isinstance(f, Feature)
+
if not f.subfeature():
return False
@@ -638,39 +672,28 @@ def is_subfeature_of (parent_property, f):
def __is_subproperty_of (parent_property, p):
""" As is_subfeature_of, for subproperties.
"""
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert isinstance(p, Property)
return is_subfeature_of (parent_property, p.feature())
-
+
# Returns true iff the subvalue is valid for the feature. When the
# optional value-string is provided, returns true iff the subvalues
# are valid for the given value of the feature.
def is_subvalue(feature, value_string, subfeature, subvalue):
-
+ assert isinstance(feature, basestring)
+ assert isinstance(value_string, basestring)
+ assert isinstance(subfeature, basestring)
+ assert isinstance(subvalue, basestring)
if not value_string:
value_string = ''
-
- if not __subfeature_from_value.has_key(feature):
- return False
-
- if not __subfeature_from_value[feature].has_key(value_string):
- return False
-
- if not __subfeature_from_value[feature][value_string].has_key(subvalue):
+ try:
+ return __subfeature_from_value[feature][value_string][subvalue] == subfeature
+ except KeyError:
return False
- if __subfeature_from_value[feature][value_string][subvalue]\
- != subfeature:
- return False
-
- return True
-
-def implied_subfeature (feature, subvalue, value_string):
- result = __find_implied_subfeature (feature, subvalue, value_string)
- if not result:
- raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
-
- return result
-
# Uses Property
def expand (properties):
@@ -684,43 +707,50 @@ def expand (properties):
two values of a given non-free feature are directly expressed in the
input, an error is issued.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
expanded = expand_subfeatures(properties)
return expand_composites (expanded)
-
+
# Accepts list of Property objects
def add_defaults (properties):
""" Given a set of properties, add default values for features not
- represented in the set.
+ represented in the set.
Note: if there's there's ordinary feature F1 and composite feature
F2, which includes some value for F1, and both feature have default values,
then the default value of F1 will be added, not the value in F2. This might
not be right idea: consider
-
+
feature variant : debug ... ;
<variant>debug : .... <runtime-debugging>on
feature <runtime-debugging> : off on ;
-
+
Here, when adding default for an empty property set, we'll get
-
+
<variant>debug <runtime_debugging>off
-
- and that's kind of strange.
+
+ and that's kind of strange.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+
result = [x for x in properties]
-
+
handled_features = set()
for p in properties:
# We don't add default for conditional properties. We don't want
# <variant>debug:<define>DEBUG to be takes as specified value for <variant>
if not p.condition():
handled_features.add(p.feature())
-
+
missing_top = [f for f in __all_top_features if not f in handled_features]
more = defaults(missing_top)
result.extend(more)
for p in more:
handled_features.add(p.feature())
-
+
# Add defaults for subfeatures of features which are present
for p in result[:]:
s = p.feature().subfeatures()
@@ -728,7 +758,7 @@ def add_defaults (properties):
for p in more:
handled_features.add(p.feature())
result.extend(more)
-
+
return result
def minimize (properties):
@@ -739,29 +769,31 @@ def minimize (properties):
Implicit properties will be expressed without feature
grist, and sub-property values will be expressed as elements joined
to the corresponding main property.
- """
-
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
# remove properties implied by composite features
components = []
for property in properties:
if __composite_properties.has_key (property):
components.extend(__composite_properties[property])
properties = b2.util.set.difference (properties, components)
-
+
# handle subfeatures and implicit features
# move subfeatures to the end of the list
properties = [p for p in properties if not p.feature().subfeature()] +\
[p for p in properties if p.feature().subfeature()]
-
+
result = []
while properties:
p = properties[0]
f = p.feature()
-
+
# locate all subproperties of $(x[1]) in the property set
subproperties = __select_subproperties (p, properties)
-
+
if subproperties:
# reconstitute the joined property name
subproperties.sort ()
@@ -774,7 +806,7 @@ def minimize (properties):
# eliminate properties whose value is equal to feature's
# default and which are not symmetric and which do not
# contradict values implied by composite properties.
-
+
# since all component properties of composites in the set
# have been eliminated, any remaining property whose
# feature is the same as a component of a composite in the
@@ -784,7 +816,7 @@ def minimize (properties):
#\
#or get_grist (fullp) in get_grist (components):
# FIXME: restore above
-
+
properties = properties[1:]
@@ -802,17 +834,17 @@ def split (properties):
substitution of backslashes for slashes, since Jam, unbidden,
sometimes swaps slash direction on NT.
"""
-
+ assert isinstance(properties, basestring)
def split_one (properties):
pieces = re.split (__re_slash_or_backslash, properties)
result = []
-
+
for x in pieces:
if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
result = result [0:-1] + [ result [-1] + '/' + x ]
else:
result.append (x)
-
+
return result
if isinstance (properties, str):
@@ -822,32 +854,34 @@ def split (properties):
for p in properties:
result += split_one (p)
return result
-
+
def compress_subproperties (properties):
""" Combine all subproperties into their parent properties
Requires: for every subproperty, there is a parent property. All
features are explicitly expressed.
-
+
This rule probably shouldn't be needed, but
build-request.expand-no-defaults is being abused for unintended
purposes and it needs help
"""
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
result = []
matched_subs = set()
all_subs = set()
for p in properties:
f = p.feature()
-
+
if not f.subfeature():
subs = __select_subproperties (p, properties)
if subs:
-
+
matched_subs.update(subs)
subvalues = '-'.join (sub.value() for sub in subs)
- result.append(b2.build.property.Property(
+ result.append(Property(
p.feature(), p.value() + '-' + subvalues,
p.condition()))
else:
@@ -865,10 +899,16 @@ def compress_subproperties (properties):
# Private methods
def __select_subproperties (parent_property, properties):
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(parent_property, Property)
return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
def __get_subfeature_name (subfeature, value_string):
- if value_string == None:
+ assert isinstance(subfeature, basestring)
+ assert isinstance(value_string, basestring) or value_string is None
+ if value_string == None:
prefix = ''
else:
prefix = value_string + ':'
@@ -877,10 +917,12 @@ def __get_subfeature_name (subfeature, value_string):
def __validate_feature_attributes (name, attributes):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(attributes, basestring)
for attribute in attributes:
if not attribute in __all_attributes:
raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
-
+
if name in __all_features:
raise AlreadyDefined ("feature '%s' already defined" % name)
elif 'implicit' in attributes and 'free' in attributes:
@@ -888,10 +930,11 @@ def __validate_feature_attributes (name, attributes):
elif 'free' in attributes and 'propagated' in attributes:
raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
-
+
def __validate_feature (feature):
""" Generates an error if the feature is unknown.
"""
+ assert isinstance(feature, basestring)
if not __all_features.has_key (feature):
raise BaseException ('unknown feature "%s"' % feature)
@@ -902,6 +945,10 @@ def __select_subfeatures (parent_property, features):
subfeatures of the property's feature which are conditional on the
property's value.
"""
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert is_iterable_typed(features, Feature)
return [f for f in features if is_subfeature_of (parent_property, f)]
-
+
# FIXME: copy over tests.
diff --git a/tools/build/src/build/generators.py b/tools/build/src/build/generators.py
index dd195a840f..f1c5145560 100644
--- a/tools/build/src/build/generators.py
+++ b/tools/build/src/build/generators.py
@@ -18,13 +18,13 @@
# It starts by selecting 'viable generators', which have any chances of producing
# the desired target type with the required properties. Generators are ranked and
# a set of most specific ones is selected.
-#
+#
# The most specific generators have their 'run' methods called, with the properties
# and list of sources. Each one selects target which can be directly consumed, and
# tries to convert the remaining ones to the types it can consume. This is done
# by recursively calling 'construct' with all consumable types.
#
-# If the generator has collected all the targets it needs, it creates targets
+# If the generator has collected all the targets it needs, it creates targets
# corresponding to result, and returns it. When all generators have been run,
# results of one of them are selected and returned as result.
#
@@ -35,7 +35,7 @@
# Likewise, when generator tries to convert sources to consumable types, it can get
# more targets that it was asked for. The question is what to do with extra targets.
# Boost.Build attempts to convert them to requested types, and attempts as early as
-# possible. Specifically, this is done after invoking each generator. (Later I'll
+# possible. Specifically, this is done after invoking each generator. (Later I'll
# document the rationale for trying extra target conversion at that point).
#
# That early conversion is not always desirable. Suppose a generator got a source of
@@ -52,10 +52,10 @@ import cStringIO
import os.path
from virtual_target import Subvariant
-import virtual_target, type, property_set, property
+from . import virtual_target, type, property_set, property
from b2.util.logger import *
from b2.util.utility import *
-from b2.util import set
+from b2.util import set as set_, is_iterable_typed, is_iterable
from b2.util.sequence import unique
import b2.util.sequence as sequence
from b2.manager import get_manager
@@ -73,8 +73,8 @@ def reset ():
__type_to_generators = {}
__generators_for_toolset = {}
__overrides = {}
-
- # TODO: can these be global?
+
+ # TODO: can these be global?
__construct_stack = []
__viable_generators_cache = {}
__viable_source_types_cache = {}
@@ -95,7 +95,7 @@ __indent = ""
def debug():
global __debug
if __debug is None:
- __debug = "--debug-generators" in bjam.variable("ARGV")
+ __debug = "--debug-generators" in bjam.variable("ARGV")
return __debug
def increase_indent():
@@ -114,7 +114,7 @@ def decrease_indent():
# same generator. Does nothing if a non-derived target type is passed to it.
#
def update_cached_information_with_a_new_type(type):
-
+ assert isinstance(type, basestring)
base_type = b2.build.type.base(type)
if base_type:
@@ -153,7 +153,7 @@ def invalidate_extendable_viable_source_target_type_cache():
__vst_cached_types.append(t)
else:
del __viable_source_types_cache[t]
-
+
def dout(message):
if debug():
print __indent + message
@@ -162,7 +162,7 @@ class Generator:
""" Creates a generator.
manager: the build manager.
id: identifies the generator
-
+
rule: the rule which sets up build actions.
composing: whether generator processes each source target in
@@ -171,49 +171,52 @@ class Generator:
recusrive generators.construct_types call.
source_types (optional): types that this generator can handle
-
+
target_types_and_names: types the generator will create and, optionally, names for
created targets. Each element should have the form
type["(" name-pattern ")"]
for example, obj(%_x). Name of generated target will be found
by replacing % with the name of source, provided explicit name
was not specified.
-
+
requirements (optional)
-
+
NOTE: all subclasses must have a similar signature for clone to work!
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
- assert(not isinstance(source_types, str))
- assert(not isinstance(target_types_and_names, str))
+ assert isinstance(id, basestring)
+ assert isinstance(composing, bool)
+ assert is_iterable_typed(source_types, basestring)
+ assert is_iterable_typed(target_types_and_names, basestring)
+ assert is_iterable_typed(requirements, basestring)
self.id_ = id
self.composing_ = composing
self.source_types_ = source_types
self.target_types_and_names_ = target_types_and_names
self.requirements_ = requirements
-
+
self.target_types_ = []
self.name_prefix_ = []
self.name_postfix_ = []
-
+
for e in target_types_and_names:
# Create three parallel lists: one with the list of target types,
- # and two other with prefixes and postfixes to be added to target
+ # and two other with prefixes and postfixes to be added to target
# name. We use parallel lists for prefix and postfix (as opposed
# to mapping), because given target type might occur several times,
# for example "H H(%_symbols)".
m = _re_separate_types_prefix_and_postfix.match (e)
-
+
if not m:
raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))
-
+
target_type = m.group (1)
if not target_type: target_type = ''
prefix = m.group (3)
if not prefix: prefix = ''
postfix = m.group (4)
if not postfix: postfix = ''
-
+
self.target_types_.append (target_type)
self.name_prefix_.append (prefix)
self.name_postfix_.append (postfix)
@@ -229,9 +232,11 @@ class Generator:
- id
- value to <toolset> feature in properties
"""
- return self.__class__ (new_id,
- self.composing_,
- self.source_types_,
+ assert isinstance(new_id, basestring)
+ assert is_iterable_typed(new_toolset_properties, basestring)
+ return self.__class__ (new_id,
+ self.composing_,
+ self.source_types_,
self.target_types_and_names_,
# Note: this does not remove any subfeatures of <toolset>
# which might cause problems
@@ -241,11 +246,13 @@ class Generator:
"""Creates another generator that is the same as $(self), except that
if 'base' is in target types of $(self), 'type' will in target types
of the new generator."""
+ assert isinstance(base, basestring)
+ assert isinstance(type, basestring)
target_types = []
for t in self.target_types_and_names_:
m = _re_match_type.match(t)
assert m
-
+
if m.group(1) == base:
if m.group(2):
target_types.append(type + m.group(2))
@@ -258,7 +265,7 @@ class Generator:
self.source_types_,
target_types,
self.requirements_)
-
+
def id(self):
return self.id_
@@ -271,28 +278,29 @@ class Generator:
def target_types (self):
""" Returns the list of target types that this generator produces.
It is assumed to be always the same -- i.e. it cannot change depending
- list of sources.
+ list of sources.
"""
return self.target_types_
def requirements (self):
""" Returns the required properties for this generator. Properties
- in returned set must be present in build properties if this
+ in returned set must be present in build properties if this
generator is to be used. If result has grist-only element,
that build properties must include some value of that feature.
"""
return self.requirements_
def match_rank (self, ps):
- """ Returns true if the generator can be run with the specified
+ """ Returns true if the generator can be run with the specified
properties.
"""
# See if generator's requirements are satisfied by
# 'properties'. Treat a feature name in requirements
# (i.e. grist-only element), as matching any value of the
# feature.
+ assert isinstance(ps, property_set.PropertySet)
all_requirements = self.requirements ()
-
+
property_requirements = []
feature_requirements = []
# This uses strings because genenator requirements allow
@@ -304,31 +312,38 @@ class Generator:
else:
feature_requirements.append (r)
-
+
return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
and all(ps.get(get_grist(s)) for s in feature_requirements)
-
+
def run (self, project, name, prop_set, sources):
""" Tries to invoke this generator on the given sources. Returns a
list of generated targets (instances of 'virtual-target').
project: Project for which the targets are generated.
-
- name: Determines the name of 'name' attribute for
+
+ name: Determines the name of 'name' attribute for
all generated targets. See 'generated_targets' method.
-
+
prop_set: Desired properties for generated targets.
-
+
sources: Source targets.
"""
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ # intermediary targets don't have names, so None is possible
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
if project.manager ().logger ().on ():
project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
-
+
if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
raise BaseException ("Unsupported source/source_type combination")
-
+
# We don't run composing generators if no name is specified. The reason
# is that composing generator combines several targets, which can have
# different names, and it cannot decide which name to give for produced
@@ -337,7 +352,7 @@ class Generator:
# This in effect, means that composing generators are runnable only
# at top-level of transofrmation graph, or if name is passed explicitly.
# Thus, we dissallow composing generators in the middle. For example, the
- # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
+ # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
# (the OBJ -> STATIC_LIB generator is composing)
if not self.composing_ or name:
return self.run_really (project, name, prop_set, sources)
@@ -345,15 +360,21 @@ class Generator:
return []
def run_really (self, project, name, prop_set, sources):
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ # intermediary targets don't have names, so None is possible
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# consumed: Targets that this generator will consume directly.
# bypassed: Targets that can't be consumed and will be returned as-is.
-
+
if self.composing_:
(consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
else:
(consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources)
-
+
result = []
if consumed:
result = self.construct_result (consumed, project, name, prop_set)
@@ -369,17 +390,23 @@ class Generator:
return result
def construct_result (self, consumed, project, name, prop_set):
- """ Constructs the dependency graph that will be returned by this
+ """ Constructs the dependency graph that will be returned by this
generator.
consumed: Already prepared list of consumable targets
- If generator requires several source files will contain
+ If generator requires several source files will contain
exactly len $(self.source_types_) targets with matching types
- Otherwise, might contain several targets with the type of
+ Otherwise, might contain several targets with the type of
self.source_types_ [0]
project:
name:
prop_set: Properties to be used for all actions create here
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert is_iterable_typed(consumed, virtual_target.VirtualTarget)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
result = []
# If this is 1->1 transformation, apply it to all consumed targets in order.
if len (self.source_types_) < 2 and not self.composing_:
@@ -395,6 +422,7 @@ class Generator:
return result
def determine_target_name(self, fullname):
+ assert isinstance(fullname, basestring)
# Determine target name from fullname (maybe including path components)
# Place optional prefix and postfix around basename
@@ -415,7 +443,8 @@ class Generator:
def determine_output_name(self, sources):
"""Determine the name of the produced target from the
names of the sources."""
-
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
# The simple case if when a name
# of source has single dot. Then, we take the part before
# dot. Several dots can be caused by:
@@ -426,47 +455,53 @@ class Generator:
# dot. In the second case -- no sure, but for now take
# the part till the last dot too.
name = os.path.splitext(sources[0].name())[0]
-
+
for s in sources[1:]:
n2 = os.path.splitext(s.name())
if n2 != name:
get_manager().errors()(
"%s: source targets have different names: cannot determine target name"
% (self.id_))
-
+
# Names of sources might include directory. We should strip it.
return self.determine_target_name(sources[0].name())
-
-
+
+
def generated_targets (self, sources, prop_set, project, name):
""" Constructs targets that are created after consuming 'sources'.
The result will be the list of virtual-target, which the same length
as 'target_types' attribute and with corresponding types.
-
- When 'name' is empty, all source targets must have the same value of
+
+ When 'name' is empty, all source targets must have the same value of
the 'name' attribute, which will be used instead of the 'name' argument.
-
+
The value of 'name' attribute for each generated target will be equal to
the 'name' parameter if there's no name pattern for this type. Otherwise,
- the '%' symbol in the name pattern will be replaced with the 'name' parameter
+ the '%' symbol in the name pattern will be replaced with the 'name' parameter
to obtain the 'name' attribute.
-
+
For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes
for T1 and T2 are .t1 and t2, and source if foo.z, then created files would
be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the
basename of a file.
-
+
Note that this pattern mechanism has nothing to do with implicit patterns
- in make. It's a way to produce target which name is different for name of
+ in make. It's a way to produce target which name is different for name of
source.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
if not name:
name = self.determine_output_name(sources)
-
+
# Assign an action for each target
action = self.action_class()
a = action(project.manager(), sources, self.id_, prop_set)
-
+
# Create generated target for each target type.
targets = []
pre = self.name_prefix_
@@ -477,9 +512,9 @@ class Generator:
generated_name = os.path.join(os.path.dirname(name), generated_name)
pre = pre[1:]
post = post[1:]
-
+
targets.append(virtual_target.FileTarget(generated_name, t, project, a))
-
+
return [ project.manager().virtual_targets().register(t) for t in targets ]
def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
@@ -489,17 +524,24 @@ class Generator:
only_one: convert 'source' to only one of source types
if there's more that one possibility, report an
error.
-
+
Returns a pair:
- consumed: all targets that can be consumed.
+ consumed: all targets that can be consumed.
bypassed: all targets that cannot be consumed.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(only_one, bool)
consumed = []
bypassed = []
- missing_types = []
+ missing_types = []
if len (sources) > 1:
- # Don't know how to handle several sources yet. Just try
+ # Don't know how to handle several sources yet. Just try
# to pass the request to other generator
missing_types = self.source_types_
@@ -507,26 +549,26 @@ class Generator:
(c, m) = self.consume_directly (sources [0])
consumed += c
missing_types += m
-
+
# No need to search for transformation if
# some source type has consumed source and
# no more source types are needed.
if only_one and consumed:
missing_types = []
-
+
#TODO: we should check that only one source type
#if create of 'only_one' is true.
# TODO: consider if consuned/bypassed separation should
# be done by 'construct_types'.
-
+
if missing_types:
transformed = construct_types (project, name, missing_types, prop_set, sources)
-
+
# Add targets of right type to 'consumed'. Add others to
# 'bypassed'. The 'generators.construct' rule has done
# its best to convert everything to the required type.
# There's no need to rerun it on targets of different types.
-
+
# NOTE: ignoring usage requirements
for t in transformed[1]:
if t.type() in missing_types:
@@ -534,36 +576,45 @@ class Generator:
else:
bypassed.append(t)
-
+
consumed = unique(consumed)
bypassed = unique(bypassed)
-
+
# remove elements of 'bypassed' that are in 'consumed'
-
- # Suppose the target type of current generator, X is produced from
+
+ # Suppose the target type of current generator, X is produced from
# X_1 and X_2, which are produced from Y by one generator.
# When creating X_1 from Y, X_2 will be added to 'bypassed'
# Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
# But they are also in 'consumed'. We have to remove them from
# bypassed, so that generators up the call stack don't try to convert
- # them.
+ # them.
# In this particular case, X_1 instance in 'consumed' and X_1 instance
# in 'bypassed' will be the same: because they have the same source and
# action name, and 'virtual-target.register' won't allow two different
# instances. Therefore, it's OK to use 'set.difference'.
-
+
bypassed = set.difference(bypassed, consumed)
return (consumed, bypassed)
-
+
def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
""" Converts several files to consumable types.
- """
+ """
consumed = []
bypassed = []
+ if __debug__:
+ from .targets import ProjectTarget
+
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# We process each source one-by-one, trying to convert it to
# a usable type.
for s in sources:
@@ -578,12 +629,13 @@ class Generator:
return (consumed, bypassed)
def consume_directly (self, source):
+ assert isinstance(source, virtual_target.VirtualTarget)
real_source_type = source.type ()
# If there are no source types, we can consume anything
source_types = self.source_types()
if not source_types:
- source_types = [real_source_type]
+ source_types = [real_source_type]
consumed = []
missing_types = []
@@ -596,9 +648,9 @@ class Generator:
missing_types.append (st)
return (consumed, missing_types)
-
+
def action_class (self):
- """ Returns the class to be used to actions. Default implementation
+ """ Returns the class to be used to actions. Default implementation
returns "action".
"""
return virtual_target.Action
@@ -607,11 +659,13 @@ class Generator:
def find (id):
""" Finds the generator with id. Returns None if not found.
"""
+ assert isinstance(id, basestring)
return __generators.get (id, None)
def register (g):
""" Registers new generator instance 'g'.
"""
+ assert isinstance(g, Generator)
id = g.id()
__generators [id] = g
@@ -660,6 +714,19 @@ def register (g):
invalidate_extendable_viable_source_target_type_cache()
+def check_register_types(fn):
+ def wrapper(id, source_types, target_types, requirements=[]):
+ assert isinstance(id, basestring)
+ assert is_iterable_typed(source_types, basestring)
+ assert is_iterable_typed(target_types, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ return fn(id, source_types, target_types, requirements=requirements)
+ wrapper.__name__ = fn.__name__
+ wrapper.__doc__ = fn.__doc__
+ return wrapper
+
+
+@check_register_types
def register_standard (id, source_types, target_types, requirements = []):
""" Creates new instance of the 'generator' class and registers it.
Returns the creates instance.
@@ -671,6 +738,8 @@ def register_standard (id, source_types, target_types, requirements = []):
register (g)
return g
+
+@check_register_types
def register_composing (id, source_types, target_types, requirements = []):
g = Generator (id, True, source_types, target_types, requirements)
register (g)
@@ -679,6 +748,7 @@ def register_composing (id, source_types, target_types, requirements = []):
def generators_for_toolset (toolset):
""" Returns all generators which belong to 'toolset'.
"""
+ assert isinstance(toolset, basestring)
return __generators_for_toolset.get(toolset, [])
def override (overrider_id, overridee_id):
@@ -687,26 +757,29 @@ def override (overrider_id, overridee_id):
that could produce a target of certain type,
both those generators are amoung viable generators,
the overridden generator is immediately discarded.
-
+
The overridden generators are discarded immediately
after computing the list of viable generators, before
running any of them."""
-
+ assert isinstance(overrider_id, basestring)
+ assert isinstance(overridee_id, basestring)
+
__overrides.setdefault(overrider_id, []).append(overridee_id)
def __viable_source_types_real (target_type):
""" Returns a list of source type which can possibly be converted
to 'target_type' by some chain of generator invocation.
-
+
More formally, takes all generators for 'target_type' and
returns union of source types for those generators and result
of calling itself recusrively on source types.
"""
+ assert isinstance(target_type, basestring)
generators = []
# 't0' is the initial list of target types we need to process to get a list
# of their viable source target types. New target types will not be added to
- # this list.
+ # this list.
t0 = type.all_bases (target_type)
@@ -714,14 +787,14 @@ def __viable_source_types_real (target_type):
# list of their viable source target types. This list will get expanded as
# we locate more target types to process.
t = t0
-
+
result = []
while t:
- # Find all generators for current type.
+ # Find all generators for current type.
# Unlike 'find_viable_generators' we don't care about prop_set.
generators = __type_to_generators.get (t [0], [])
t = t[1:]
-
+
for g in generators:
if not g.source_types():
# Empty source types -- everything can be accepted
@@ -729,7 +802,7 @@ def __viable_source_types_real (target_type):
# This will terminate outer loop.
t = None
break
-
+
for source_type in g.source_types ():
if not source_type in result:
# If generator accepts 'source_type' it
@@ -750,13 +823,14 @@ def __viable_source_types_real (target_type):
if not n in t0:
t.append (n)
result.append (n)
-
+
return result
def viable_source_types (target_type):
""" Helper rule, caches the result of '__viable_source_types_real'.
"""
+ assert isinstance(target_type, basestring)
if not __viable_source_types_cache.has_key(target_type):
__vst_cached_types.append(target_type)
__viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
@@ -767,6 +841,7 @@ def viable_source_types_for_generator_real (generator):
method of 'generator', has some change of being eventually used
(probably after conversion by other generators)
"""
+ assert isinstance(generator, Generator)
source_types = generator.source_types ()
if not source_types:
@@ -791,15 +866,24 @@ def viable_source_types_for_generator_real (generator):
def viable_source_types_for_generator (generator):
""" Caches the result of 'viable_source_types_for_generator'.
"""
+ assert isinstance(generator, Generator)
if not __viable_source_types_cache.has_key(generator):
__vstg_cached_generators.append(generator)
__viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
-
+
return __viable_source_types_cache[generator]
def try_one_generator_really (project, name, generator, target_type, properties, sources):
""" Returns usage requirements + list of created targets.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(generator, Generator)
+ assert isinstance(target_type, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
targets = generator.run (project, name, properties, sources)
usage_requirements = []
@@ -809,7 +893,7 @@ def try_one_generator_really (project, name, generator, target_type, properties,
if targets:
success = True;
-
+
if isinstance (targets[0], property_set.PropertySet):
usage_requirements = targets [0]
targets = targets [1]
@@ -818,7 +902,7 @@ def try_one_generator_really (project, name, generator, target_type, properties,
usage_requirements = property_set.empty ()
dout( " generator" + generator.id() + " spawned ")
- # generators.dout [ indent ] " " $(targets) ;
+ # generators.dout [ indent ] " " $(targets) ;
# if $(usage-requirements)
# {
# generators.dout [ indent ] " with usage requirements:" $(x) ;
@@ -834,21 +918,29 @@ def try_one_generator (project, name, generator, target_type, properties, source
to fail. If so, quickly returns empty list. Otherwise, calls
try_one_generator_really.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(generator, Generator)
+ assert isinstance(target_type, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
source_types = []
for s in sources:
source_types.append (s.type ())
viable_source_types = viable_source_types_for_generator (generator)
-
+
if source_types and viable_source_types != ['*'] and\
- not set.intersection (source_types, viable_source_types):
+ not set_.intersection (source_types, viable_source_types):
if project.manager ().logger ().on ():
- id = generator.id ()
+ id = generator.id ()
project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
project.manager ().logger ().log (__name__, "source_types" '%s' % source_types)
project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
-
+
return []
else:
@@ -856,10 +948,18 @@ def try_one_generator (project, name, generator, target_type, properties, source
def construct_types (project, name, target_types, prop_set, sources):
-
+
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert is_iterable_typed(target_types, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
result = []
usage_requirements = property_set.empty()
-
+
for t in target_types:
r = construct (project, name, t, prop_set, sources)
@@ -870,7 +970,7 @@ def construct_types (project, name, target_types, prop_set, sources):
# TODO: have to introduce parameter controlling if
# several types can be matched and add appropriate
- # checks
+ # checks
# TODO: need to review the documentation for
# 'construct' to see if it should return $(source) even
@@ -883,9 +983,10 @@ def construct_types (project, name, target_types, prop_set, sources):
return (usage_requirements, sources)
def __ensure_type (targets):
- """ Ensures all 'targets' have types. If this is not so, exists with
+ """ Ensures all 'targets' have types. If this is not so, exists with
error.
"""
+ assert is_iterable_typed(targets, virtual_target.VirtualTarget)
for t in targets:
if not t.type ():
get_manager().errors()("target '%s' has no type" % str (t))
@@ -898,24 +999,26 @@ def find_viable_generators_aux (target_type, prop_set):
- for each type find all generators that generate that type and which requirements
are satisfied by properties.
- if the set of generators is not empty, returns that set.
-
+
Note: this algorithm explicitly ignores generators for base classes if there's
at least one generator for requested target_type.
"""
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
# Select generators that can create the required target type.
viable_generators = []
initial_generators = []
- import type
+ from . import type
# Try all-type generators first. Assume they have
# quite specific requirements.
all_bases = type.all_bases(target_type)
-
+
for t in all_bases:
-
+
initial_generators = __type_to_generators.get(t, [])
-
+
if initial_generators:
dout("there are generators for this type")
if t != target_type:
@@ -933,22 +1036,24 @@ def find_viable_generators_aux (target_type, prop_set):
ng = g.clone_and_change_target_type(t, target_type)
generators2.append(ng)
register(ng)
-
+
initial_generators = generators2
break
-
+
for g in initial_generators:
dout("trying generator " + g.id()
+ "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
-
+
m = g.match_rank(prop_set)
if m:
dout(" is viable")
- viable_generators.append(g)
-
+ viable_generators.append(g)
+
return viable_generators
def find_viable_generators (target_type, prop_set):
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
key = target_type + '.' + str (prop_set)
l = __viable_generators_cache.get (key, None)
@@ -971,15 +1076,15 @@ def find_viable_generators (target_type, prop_set):
# Generators which override 'all'.
all_overrides = []
-
+
# Generators which are overriden
- overriden_ids = []
+ overriden_ids = []
for g in viable_generators:
id = g.id ()
-
+
this_overrides = __overrides.get (id, [])
-
+
if this_overrides:
overriden_ids.extend (this_overrides)
if 'all' in this_overrides:
@@ -989,24 +1094,31 @@ def find_viable_generators (target_type, prop_set):
viable_generators = all_overrides
return [g for g in viable_generators if not g.id() in overriden_ids]
-
+
def __construct_really (project, name, target_type, prop_set, sources):
""" Attempts to construct target by finding viable generators, running them
and selecting the dependency graph.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
viable_generators = find_viable_generators (target_type, prop_set)
-
+
result = []
dout(" *** %d viable generators" % len (viable_generators))
generators_that_succeeded = []
-
+
for g in viable_generators:
- __active_generators.append(g)
+ __active_generators.append(g)
r = try_one_generator (project, name, g, target_type, prop_set, sources)
del __active_generators[-1]
-
+
if r:
generators_that_succeeded.append(g)
if result:
@@ -1027,7 +1139,7 @@ def __construct_really (project, name, target_type, prop_set, sources):
get_manager().errors()(output.getvalue())
else:
result = r;
-
+
return result;
@@ -1036,19 +1148,26 @@ def construct (project, name, target_type, prop_set, sources, top_level=False):
from 'sources'. The 'sources' are treated as a collection of
*possible* ingridients -- i.e. it is not required to consume
them all. If 'multiple' is true, the rule is allowed to return
- several targets of 'target-type'.
-
+ several targets of 'target-type'.
+
Returns a list of target. When this invocation is first instance of
'construct' in stack, returns only targets of requested 'target-type',
otherwise, returns also unused sources and additionally generated
targets.
-
+
If 'top-level' is set, does not suppress generators that are already
used in the stack. This may be useful in cases where a generator
has to build a metatarget -- for example a target corresponding to
- built tool.
+ built tool.
"""
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(top_level, bool)
global __active_generators
if top_level:
saved_active = __active_generators
@@ -1057,23 +1176,23 @@ def construct (project, name, target_type, prop_set, sources, top_level=False):
global __construct_stack
if not __construct_stack:
__ensure_type (sources)
-
+
__construct_stack.append (1)
increase_indent ()
if project.manager().logger().on():
dout( "*** construct " + target_type)
-
+
for s in sources:
dout(" from " + str(s))
project.manager().logger().log (__name__, " properties: ", prop_set.raw ())
-
+
result = __construct_really(project, name, target_type, prop_set, sources)
decrease_indent()
-
+
__construct_stack = __construct_stack [1:]
if top_level:
@@ -1086,7 +1205,7 @@ def add_usage_requirements (result, raw_properties):
if isinstance (result[0], property_set.PropertySet):
return (result[0].add_raw(raw_properties), result[1])
else:
- return (propery_set.create(raw-properties), result)
+ return (property_set.create(raw_properties), result)
#if [ class.is-a $(result[1]) : property-set ]
#{
# return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
diff --git a/tools/build/src/build/project.py b/tools/build/src/build/project.py
index 71bc33fb38..ea8fe0106e 100644
--- a/tools/build/src/build/project.py
+++ b/tools/build/src/build/project.py
@@ -40,9 +40,10 @@
# their project id.
import b2.util.path
+import b2.build.targets
from b2.build import property_set, property
from b2.build.errors import ExceptionWithUserContext
-import b2.build.targets
+from b2.manager import get_manager
import bjam
import b2
@@ -56,7 +57,10 @@ import imp
import traceback
import b2.util.option as option
-from b2.util import record_jam_to_value_mapping, qualify_jam_action
+from b2.util import (
+ record_jam_to_value_mapping, qualify_jam_action, is_iterable_typed, bjam_signature,
+ is_iterable)
+
class ProjectRegistry:
@@ -130,6 +134,7 @@ class ProjectRegistry:
file and jamfile needed by the loaded one will be loaded recursively.
If the jamfile at that location is loaded already, does nothing.
Returns the project module for the Jamfile."""
+ assert isinstance(jamfile_location, basestring)
absolute = os.path.join(os.getcwd(), jamfile_location)
absolute = os.path.normpath(absolute)
@@ -159,6 +164,7 @@ class ProjectRegistry:
return mname
def load_used_projects(self, module_name):
+ assert isinstance(module_name, basestring)
# local used = [ modules.peek $(module-name) : .used-projects ] ;
used = self.used_projects[module_name]
@@ -172,7 +178,7 @@ class ProjectRegistry:
def load_parent(self, location):
"""Loads parent of Jamfile at 'location'.
Issues an error if nothing is found."""
-
+ assert isinstance(location, basestring)
found = b2.util.path.glob_in_parents(
location, self.JAMROOT + self.JAMFILE)
@@ -187,6 +193,8 @@ class ProjectRegistry:
"""Given 'name' which can be project-id or plain directory name,
return project module corresponding to that id or directory.
Returns nothing of project is not found."""
+ assert isinstance(name, basestring)
+ assert isinstance(current_location, basestring)
project_module = None
@@ -214,6 +222,7 @@ class ProjectRegistry:
"""Returns the name of module corresponding to 'jamfile-location'.
If no module corresponds to location yet, associates default
module name with that location."""
+ assert isinstance(jamfile_location, basestring)
module = self.location2module.get(jamfile_location)
if not module:
# Root the path, so that locations are always umbiguious.
@@ -230,6 +239,9 @@ class ProjectRegistry:
exact names of all the Jamfiles in the given directory. The optional
parent-root argument causes this to search not the given directory
but the ones above it up to the directory given in it."""
+ assert isinstance(dir, basestring)
+ assert isinstance(parent_root, (int, bool))
+ assert isinstance(no_errors, (int, bool))
# Glob for all the possible Jamfiles according to the match pattern.
#
@@ -280,6 +292,8 @@ Please consult the documentation at 'http://boost.org/boost-build2'."""
"""Load a Jamfile at the given directory. Returns nothing.
Will attempt to load the file as indicated by the JAMFILE patterns.
Effect of calling this rule twice with the same 'dir' is underfined."""
+ assert isinstance(dir, basestring)
+ assert isinstance(jamfile_module, basestring)
# See if the Jamfile is where it should be.
is_jamroot = False
@@ -359,12 +373,15 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
The caller is required to never call this method twice on
the same file.
"""
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(file, basestring)
self.used_projects[jamfile_module] = []
bjam.call("load", jamfile_module, file)
self.load_used_projects(jamfile_module)
def is_jamroot(self, basename):
+ assert isinstance(basename, basestring)
match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
if match:
return 1
@@ -378,7 +395,9 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
location is the location (directory) of the project to initialize.
If not specified, standalone project will be initialized
"""
-
+ assert isinstance(module_name, basestring)
+ assert isinstance(location, basestring) or location is None
+ assert isinstance(basename, basestring) or basename is None
if "--debug-loading" in self.manager.argv():
print "Initializing project '%s'" % module_name
@@ -465,6 +484,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def inherit_attributes(self, project_module, parent_module):
"""Make 'project-module' inherit attributes of project
root and parent module."""
+ assert isinstance(project_module, basestring)
+ assert isinstance(parent_module, basestring)
attributes = self.module2attributes[project_module]
pattributes = self.module2attributes[parent_module]
@@ -502,6 +523,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def register_id(self, id, module):
"""Associate the given id with the given project module."""
+ assert isinstance(id, basestring)
+ assert isinstance(module, basestring)
self.id2module[id] = module
def current(self):
@@ -509,11 +532,17 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
return self.current_project
def set_current(self, c):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(c, ProjectTarget)
self.current_project = c
def push_current(self, project):
"""Temporary changes the current project to 'project'. Should
be followed by 'pop-current'."""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
self.saved_current_project.append(self.current_project)
self.current_project = project
@@ -524,11 +553,14 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def attributes(self, project):
"""Returns the project-attribute instance for the
specified jamfile module."""
+ assert isinstance(project, basestring)
return self.module2attributes[project]
def attribute(self, project, attribute):
"""Returns the value of the specified attribute in the
specified jamfile module."""
+ assert isinstance(project, basestring)
+ assert isinstance(attribute, basestring)
try:
return self.module2attributes[project].get(attribute)
except:
@@ -537,10 +569,14 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def attributeDefault(self, project, attribute, default):
"""Returns the value of the specified attribute in the
specified jamfile module."""
+ assert isinstance(project, basestring)
+ assert isinstance(attribute, basestring)
+ assert isinstance(default, basestring) or default is None
return self.module2attributes[project].getDefault(attribute, default)
def target(self, project_module):
"""Returns the project target corresponding to the 'project-module'."""
+ assert isinstance(project_module, basestring)
if not self.module2target.has_key(project_module):
self.module2target[project_module] = \
b2.build.targets.ProjectTarget(project_module, project_module,
@@ -550,6 +586,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def use(self, id, location):
# Use/load a project.
+ assert isinstance(id, basestring)
+ assert isinstance(location, basestring)
saved_project = self.current_project
project_module = self.load(location)
declared_id = self.attributeDefault(project_module, "id", "")
@@ -564,16 +602,24 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
self.current_module = saved_project
- def add_rule(self, name, callable):
+ def add_rule(self, name, callable_):
"""Makes rule 'name' available to all subsequently loaded Jamfiles.
Calling that rule wil relay to 'callable'."""
- self.project_rules_.add_rule(name, callable)
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ self.project_rules_.add_rule(name, callable_)
def project_rules(self):
return self.project_rules_
def glob_internal(self, project, wildcards, excludes, rule_name):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring) or excludes is None
+ assert isinstance(rule_name, basestring)
location = project.get("source-location")[0]
result = []
@@ -656,6 +702,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
since then we might get naming conflicts between standard
Python modules and those.
"""
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(extra_path, basestring) or extra_path is None
# See if we loaded module of this name already
existing = self.loaded_tool_modules_.get(name)
if existing:
@@ -774,7 +822,20 @@ class ProjectAttributes:
def set(self, attribute, specification, exact=False):
"""Set the named attribute from the specification given by the user.
The value actually set may be different."""
-
+ assert isinstance(attribute, basestring)
+ assert isinstance(exact, (int, bool))
+ if __debug__ and not exact:
+ if attribute == 'requirements':
+ assert (isinstance(specification, property_set.PropertySet)
+ or all(isinstance(s, basestring) for s in specification))
+ elif attribute in (
+ 'usage-requirements', 'default-build', 'source-location', 'build-dir', 'id'):
+ assert is_iterable_typed(specification, basestring)
+ elif __debug__:
+ assert (
+ isinstance(specification, (property_set.PropertySet, type(None), basestring))
+ or all(isinstance(s, basestring) for s in specification)
+ )
if exact:
self.__dict__[attribute] = specification
@@ -838,9 +899,11 @@ for project at '%s'""" % (attribute, self.location))
self.__dict__[attribute] = specification
def get(self, attribute):
+ assert isinstance(attribute, basestring)
return self.__dict__[attribute]
def getDefault(self, attribute, default):
+ assert isinstance(attribute, basestring)
return self.__dict__.get(attribute, default)
def dump(self):
@@ -876,41 +939,51 @@ class ProjectRules:
"error_reporting_wrapper", "add_rule_for_type", "reverse"]]
self.all_names_ = [x for x in self.local_names]
- def _import_rule(self, bjam_module, name, callable):
- if hasattr(callable, "bjam_signature"):
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature)
+ def _import_rule(self, bjam_module, name, callable_):
+ assert isinstance(bjam_module, basestring)
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ if hasattr(callable_, "bjam_signature"):
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable_), callable_.bjam_signature)
else:
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable))
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable_))
def add_rule_for_type(self, type):
+ assert isinstance(type, basestring)
rule_name = type.lower().replace("_", "-")
- def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []):
+ @bjam_signature([['name'], ['sources', '*'], ['requirements', '*'],
+ ['default_build', '*'], ['usage_requirements', '*']])
+ def xpto (name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+
return self.manager_.targets().create_typed_target(
- type, self.registry.current(), name[0], sources,
+ type, self.registry.current(), name, sources,
requirements, default_build, usage_requirements)
self.add_rule(rule_name, xpto)
- def add_rule(self, name, callable):
- self.rules[name] = callable
+ def add_rule(self, name, callable_):
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ self.rules[name] = callable_
self.all_names_.append(name)
# Add new rule at global bjam scope. This might not be ideal,
# added because if a jamroot does 'import foo' where foo calls
# add_rule, we need to import new rule to jamroot scope, and
# I'm lazy to do this now.
- self._import_rule("", name, callable)
+ self._import_rule("", name, callable_)
def all_names(self):
return self.all_names_
- def call_and_report_errors(self, callable, *args, **kw):
+ def call_and_report_errors(self, callable_, *args, **kw):
+ assert callable(callable_)
result = None
try:
self.manager_.errors().push_jamfile_context()
- result = callable(*args, **kw)
+ result = callable_(*args, **kw)
except ExceptionWithUserContext, e:
e.report()
except Exception, e:
@@ -923,16 +996,18 @@ class ProjectRules:
return result
- def make_wrapper(self, callable):
+ def make_wrapper(self, callable_):
"""Given a free-standing function 'callable', return a new
callable that will call 'callable' and report all exceptins,
using 'call_and_report_errors'."""
+ assert callable(callable_)
def wrapper(*args, **kw):
- return self.call_and_report_errors(callable, *args, **kw)
+ return self.call_and_report_errors(callable_, *args, **kw)
return wrapper
def init_project(self, project_module, python_standalone=False):
-
+ assert isinstance(project_module, basestring)
+ assert isinstance(python_standalone, bool)
if python_standalone:
m = sys.modules[project_module]
@@ -961,7 +1036,7 @@ class ProjectRules:
self._import_rule(project_module, n, self.rules[n])
def project(self, *args):
-
+ assert is_iterable(args) and all(is_iterable(arg) for arg in args)
jamfile_module = self.registry.current().project_module()
attributes = self.registry.attributes(jamfile_module)
@@ -1017,7 +1092,8 @@ attribute is allowed only for top-level 'project' invocations""")
"""Declare and set a project global constant.
Project global constants are normal variables but should
not be changed. They are applied to every child Jamfile."""
- m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>"
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(value, basestring)
self.registry.current().add_constant(name[0], value)
def path_constant(self, name, value):
@@ -1025,6 +1101,8 @@ attribute is allowed only for top-level 'project' invocations""")
path is adjusted to be relative to the invocation directory. The given
value path is taken to be either absolute, or relative to this project
root."""
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(value, basestring)
if len(value) > 1:
self.registry.manager.error()("path constant should have one element")
self.registry.current().add_constant(name[0], value[0], path=1)
@@ -1032,27 +1110,35 @@ attribute is allowed only for top-level 'project' invocations""")
def use_project(self, id, where):
# See comment in 'load' for explanation why we record the
# parameters as opposed to loading the project now.
- m = self.registry.current().project_module();
+ assert is_iterable_typed(id, basestring)
+ assert is_iterable_typed(where, basestring)
+ m = self.registry.current().project_module()
self.registry.used_projects[m].append((id[0], where[0]))
def build_project(self, dir):
- assert(isinstance(dir, list))
+ assert is_iterable_typed(dir, basestring)
jamfile_module = self.registry.current().project_module()
attributes = self.registry.attributes(jamfile_module)
now = attributes.get("projects-to-build")
attributes.set("projects-to-build", now + dir, exact=True)
def explicit(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
self.registry.current().mark_targets_as_explicit(target_names)
def always(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
self.registry.current().mark_targets_as_alays(target_names)
def glob(self, wildcards, excludes=None):
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring)or excludes is None
return self.registry.glob_internal(self.registry.current(),
wildcards, excludes, "glob")
def glob_tree(self, wildcards, excludes=None):
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring) or excludes is None
bad = 0
for p in wildcards:
if os.path.dirname(p):
@@ -1076,6 +1162,7 @@ attribute is allowed only for top-level 'project' invocations""")
# will expect the module to be found even though
# the directory is not in BOOST_BUILD_PATH.
# So temporary change the search path.
+ assert is_iterable_typed(toolset, basestring)
current = self.registry.current()
location = current.get('location')
@@ -1090,7 +1177,9 @@ attribute is allowed only for top-level 'project' invocations""")
self.registry.set_current(current)
def import_(self, name, names_to_import=None, local_names=None):
-
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(names_to_import, basestring) or names_to_import is None
+ assert is_iterable_typed(local_names, basestring)or local_names is None
name = name[0]
py_name = name
if py_name == "os":
@@ -1133,7 +1222,8 @@ attribute is allowed only for top-level 'project' invocations""")
lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
<define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
"""
-
+ assert is_iterable_typed(condition, basestring)
+ assert is_iterable_typed(requirements, basestring)
c = string.join(condition, ",")
if c.find(":") != -1:
return [c + r for r in requirements]
@@ -1141,6 +1231,8 @@ attribute is allowed only for top-level 'project' invocations""")
return [c + ":" + r for r in requirements]
def option(self, name, value):
+ assert is_iterable(name) and isinstance(name[0], basestring)
+ assert is_iterable(value) and isinstance(value[0], basestring)
name = name[0]
if not name in ["site-config", "user-config", "project-config"]:
get_manager().errors()("The 'option' rule may be used only in site-config or user-config")
diff --git a/tools/build/src/build/property.py b/tools/build/src/build/property.py
index f851c9e5ef..11a18ff385 100644
--- a/tools/build/src/build/property.py
+++ b/tools/build/src/build/property.py
@@ -1,17 +1,17 @@
# Status: ported, except for tests.
# Base revision: 64070
#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
import sys
from b2.util.utility import *
from b2.build import feature
-from b2.util import sequence, qualify_jam_action
+from b2.util import sequence, qualify_jam_action, is_iterable_typed
import b2.util.set
from b2.manager import get_manager
@@ -41,7 +41,7 @@ class Property(object):
self._feature = f
self._value = value
self._condition = condition
-
+
def feature(self):
return self._feature
@@ -70,7 +70,9 @@ class Property(object):
def create_from_string(s, allow_condition=False,allow_missing_value=False):
-
+ assert isinstance(s, basestring)
+ assert isinstance(allow_condition, bool)
+ assert isinstance(allow_missing_value, bool)
condition = []
import types
if not isinstance(s, types.StringType):
@@ -92,7 +94,7 @@ def create_from_string(s, allow_condition=False,allow_missing_value=False):
if feature.is_implicit_value(s):
f = feature.implied_feature(s)
value = s
- else:
+ else:
raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
else:
if feature.valid(feature_name):
@@ -119,11 +121,11 @@ def create_from_string(s, allow_condition=False,allow_missing_value=False):
if condition:
condition = [create_from_string(x) for x in condition.split(',')]
-
+
return Property(f, value, condition)
def create_from_strings(string_list, allow_condition=False):
-
+ assert is_iterable_typed(string_list, basestring)
return [create_from_string(s, allow_condition) for s in string_list]
def reset ():
@@ -153,7 +155,7 @@ def path_order (x, y):
"""
if x == y:
return 0
-
+
xg = get_grist (x)
yg = get_grist (y)
@@ -164,10 +166,10 @@ def path_order (x, y):
return 1
else:
- if not xg:
+ if not xg:
x = feature.expand_subfeatures([x])
y = feature.expand_subfeatures([y])
-
+
if x < y:
return -1
elif x > y:
@@ -176,21 +178,23 @@ def path_order (x, y):
return 0
def identify(string):
- return string
+ return string
# Uses Property
def refine (properties, requirements):
- """ Refines 'properties' by overriding any non-free properties
- for which a different value is specified in 'requirements'.
+ """ Refines 'properties' by overriding any non-free properties
+ for which a different value is specified in 'requirements'.
Conditional requirements are just added without modification.
Returns the resulting list of properties.
"""
+ assert is_iterable_typed(properties, Property)
+ assert is_iterable_typed(requirements, Property)
# The result has no duplicates, so we store it in a set
result = set()
-
+
# Records all requirements.
required = {}
-
+
# All the elements of requirements should be present in the result
# Record them so that we can handle 'properties'.
for r in requirements:
@@ -224,14 +228,14 @@ def translate_paths (properties, path):
if p.feature().path():
values = __re_two_ampersands.split(p.value())
-
+
new_value = "&&".join(os.path.join(path, v) for v in values)
if new_value != p.value():
result.append(Property(p.feature(), new_value, p.condition()))
else:
result.append(p)
-
+
else:
result.append (p)
@@ -242,6 +246,8 @@ def translate_indirect(properties, context_module):
names of rules, used in 'context-module'. Such rules can be
either local to the module or global. Qualified local rules
with the name of the module."""
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(context_module, basestring)
result = []
for p in properties:
if p.value()[0] == '@':
@@ -257,15 +263,14 @@ def validate (properties):
""" Exit with error if any of the properties is not valid.
properties may be a single property or a sequence of properties.
"""
-
- if isinstance (properties, str):
- __validate1 (properties)
- else:
- for p in properties:
- __validate1 (p)
+ if isinstance(properties, Property):
+ properties = [properties]
+ assert is_iterable_typed(properties, Property)
+ for p in properties:
+ __validate1(p)
def expand_subfeatures_in_conditions (properties):
-
+ assert is_iterable_typed(properties, Property)
result = []
for p in properties:
@@ -296,8 +301,9 @@ def split_conditional (property):
<variant>debug,<toolset>gcc <inlining>full.
Otherwise, returns empty string.
"""
+ assert isinstance(property, basestring)
m = __re_split_conditional.match (property)
-
+
if m:
return (m.group (1), '<' + m.group (2))
@@ -307,14 +313,18 @@ def split_conditional (property):
def select (features, properties):
""" Selects properties which correspond to any of the given features.
"""
+ assert is_iterable_typed(properties, basestring)
result = []
-
+
# add any missing angle brackets
features = add_grist (features)
return [p for p in properties if get_grist(p) in features]
def validate_property_sets (sets):
+ if __debug__:
+ from .property_set import PropertySet
+ assert is_iterable_typed(sets, PropertySet)
for s in sets:
validate(s.all())
@@ -323,6 +333,10 @@ def evaluate_conditionals_in_context (properties, context):
For those with met conditions, removes the condition. Properies
in conditions are looked up in 'context'
"""
+ if __debug__:
+ from .property_set import PropertySet
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(context, PropertySet)
base = []
conditional = []
@@ -348,8 +362,11 @@ def change (properties, feature, value = None):
given feature replaced by the given value.
If 'value' is None the feature will be removed.
"""
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(feature, basestring)
+ assert isinstance(value, (basestring, type(None)))
result = []
-
+
feature = add_grist (feature)
for p in properties:
@@ -368,7 +385,8 @@ def change (properties, feature, value = None):
def __validate1 (property):
""" Exit with error if property is not valid.
- """
+ """
+ assert isinstance(property, Property)
msg = None
if not property.feature().free():
@@ -379,7 +397,7 @@ def __validate1 (property):
# Still to port.
# Original lines are prefixed with "# "
#
-#
+#
# import utility : ungrist ;
# import sequence : unique ;
# import errors : error ;
@@ -389,8 +407,8 @@ def __validate1 (property):
# import set ;
# import path ;
# import assert ;
-#
-#
+#
+#
# rule validate-property-sets ( property-sets * )
@@ -405,7 +423,10 @@ def __validate1 (property):
def remove(attributes, properties):
"""Returns a property sets which include all the elements
in 'properties' that do not have attributes listed in 'attributes'."""
-
+ if isinstance(attributes, basestring):
+ attributes = [attributes]
+ assert is_iterable_typed(attributes, basestring)
+ assert is_iterable_typed(properties, basestring)
result = []
for e in properties:
attributes_new = feature.attributes(get_grist(e))
@@ -424,6 +445,8 @@ def remove(attributes, properties):
def take(attributes, properties):
"""Returns a property set which include all
properties in 'properties' that have any of 'attributes'."""
+ assert is_iterable_typed(attributes, basestring)
+ assert is_iterable_typed(properties, basestring)
result = []
for e in properties:
if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
@@ -431,7 +454,9 @@ def take(attributes, properties):
return result
def translate_dependencies(properties, project_id, location):
-
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(project_id, basestring)
+ assert isinstance(location, basestring)
result = []
for p in properties:
@@ -447,10 +472,10 @@ def translate_dependencies(properties, project_id, location):
pass
else:
rooted = os.path.join(os.getcwd(), location, rooted)
-
+
result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition()))
-
- elif os.path.isabs(v):
+
+ elif os.path.isabs(v):
result.append(p)
else:
result.append(Property(p.feature(), project_id + "//" + v, p.condition()))
@@ -464,10 +489,12 @@ class PropertyMap:
def __init__ (self):
self.__properties = []
self.__values = []
-
+
def insert (self, properties, value):
""" Associate value with properties.
"""
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(value, basestring)
self.__properties.append(properties)
self.__values.append(value)
@@ -477,15 +504,18 @@ class PropertyMap:
subset has value assigned to it, return the
value for the longest subset, if it's unique.
"""
+ assert is_iterable_typed(properties, basestring)
return self.find_replace (properties)
def find_replace(self, properties, value=None):
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(value, (basestring, type(None)))
matches = []
match_ranks = []
-
+
for i in range(0, len(self.__properties)):
p = self.__properties[i]
-
+
if b2.util.set.contains (p, properties):
matches.append (i)
match_ranks.append(len(p))
@@ -499,7 +529,7 @@ class PropertyMap:
raise NoBestMatchingAlternative ()
best = best [0]
-
+
original = self.__values[best]
if value:
@@ -512,12 +542,12 @@ class PropertyMap:
# import errors : try catch ;
# import feature ;
# import feature : feature subfeature compose ;
-#
+#
# # local rules must be explicitly re-imported
# import property : path-order ;
-#
+#
# feature.prepare-test property-test-temp ;
-#
+#
# feature toolset : gcc : implicit symmetric ;
# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
# 3.0 3.0.1 3.0.2 : optional ;
@@ -526,98 +556,98 @@ class PropertyMap:
# feature optimization : on off ;
# feature variant : debug release : implicit composite symmetric ;
# feature rtti : on off : link-incompatible ;
-#
+#
# compose <variant>debug : <define>_DEBUG <optimization>off ;
# compose <variant>release : <define>NDEBUG <optimization>on ;
-#
+#
# import assert ;
# import "class" : new ;
-#
+#
# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
-#
+#
# assert.result <toolset>gcc <rtti>off <define>FOO
# : refine <toolset>gcc <rtti>off
# : <define>FOO
# : $(test-space)
# ;
-#
+#
# assert.result <toolset>gcc <optimization>on
# : refine <toolset>gcc <optimization>off
# : <optimization>on
# : $(test-space)
# ;
-#
+#
# assert.result <toolset>gcc <rtti>off
# : refine <toolset>gcc : <rtti>off : $(test-space)
# ;
-#
+#
# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
-# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
# : $(test-space)
# ;
-#
-# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
-# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+#
+# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
# : $(test-space)
# ;
-#
+#
# assert.result <define>MY_RELEASE
-# : evaluate-conditionals-in-context
+# : evaluate-conditionals-in-context
# <variant>release,<rtti>off:<define>MY_RELEASE
# : <toolset>gcc <variant>release <rtti>off
-#
+#
# ;
-#
+#
# try ;
# validate <feature>value : $(test-space) ;
# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
-#
+#
# try ;
# validate <rtti>default : $(test-space) ;
# catch \"default\" is not a known value of feature <rtti> ;
-#
+#
# validate <define>WHATEVER : $(test-space) ;
-#
+#
# try ;
# validate <rtti> : $(test-space) ;
# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
-#
+#
# try ;
# validate value : $(test-space) ;
# catch "value" is not a value of an implicit feature ;
-#
-#
-# assert.result <rtti>on
+#
+#
+# assert.result <rtti>on
# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
-#
-# assert.result <include>a
+#
+# assert.result <include>a
# : select include : <include>a <toolset>gcc ;
-#
-# assert.result <include>a
+#
+# assert.result <include>a
# : select include bar : <include>a <toolset>gcc ;
-#
+#
# assert.result <include>a <toolset>gcc
# : select include <bar> <toolset> : <include>a <toolset>gcc ;
-#
-# assert.result <toolset>kylix <include>a
+#
+# assert.result <toolset>kylix <include>a
# : change <toolset>gcc <include>a : <toolset> kylix ;
-#
-# # Test ordinary properties
-# assert.result
-# : split-conditional <toolset>gcc
+#
+# # Test ordinary properties
+# assert.result
+# : split-conditional <toolset>gcc
# ;
-#
+#
# # Test properties with ":"
# assert.result
# : split-conditional <define>FOO=A::B
# ;
-#
+#
# # Test conditional feature
# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
# ;
-#
+#
# feature.finish-test property-test-temp ;
# }
-#
-
+#
+
diff --git a/tools/build/src/build/property_set.py b/tools/build/src/build/property_set.py
index 37fe466313..494a5b1b70 100644
--- a/tools/build/src/build/property_set.py
+++ b/tools/build/src/build/property_set.py
@@ -8,6 +8,7 @@
import hashlib
+import bjam
from b2.util.utility import *
import property, feature
import b2.build.feature
@@ -15,7 +16,7 @@ from b2.exceptions import *
from b2.build.property import get_abbreviated_paths
from b2.util.sequence import unique
from b2.util.set import difference
-from b2.util import cached, abbreviate_dashed
+from b2.util import cached, abbreviate_dashed, is_iterable_typed
from b2.manager import get_manager
@@ -36,6 +37,8 @@ def create (raw_properties = []):
""" Creates a new 'PropertySet' instance for the given raw properties,
or returns an already existing one.
"""
+ assert (is_iterable_typed(raw_properties, property.Property)
+ or is_iterable_typed(raw_properties, basestring))
# FIXME: propagate to callers.
if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
x = raw_properties
@@ -58,6 +61,7 @@ def create_with_validation (raw_properties):
that all properties are valid and converting implicit
properties into gristed form.
"""
+ assert is_iterable_typed(raw_properties, basestring)
properties = [property.create_from_string(s) for s in raw_properties]
property.validate(properties)
@@ -71,7 +75,9 @@ def empty ():
def create_from_user_input(raw_properties, jamfile_module, location):
"""Creates a property-set from the input given by the user, in the
context of 'jamfile-module' at 'location'"""
-
+ assert is_iterable_typed(raw_properties, basestring)
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(location, basestring)
properties = property.create_from_strings(raw_properties, True)
properties = property.translate_paths(properties, location)
properties = property.translate_indirect(properties, jamfile_module)
@@ -95,7 +101,10 @@ def refine_from_user_input(parent_requirements, specification, jamfile_module,
- project-module -- the module to which context indirect features
will be bound.
- location -- the path to which path features are relative."""
-
+ assert isinstance(parent_requirements, PropertySet)
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(location, basestring)
if not specification:
return parent_requirements
@@ -146,7 +155,7 @@ class PropertySet:
caching whenever possible.
"""
def __init__ (self, properties = []):
-
+ assert is_iterable_typed(properties, property.Property)
raw_properties = []
for p in properties:
@@ -304,6 +313,7 @@ class PropertySet:
return self.subfeatures_
def evaluate_conditionals(self, context=None):
+ assert isinstance(context, (PropertySet, type(None)))
if not context:
context = self
@@ -410,6 +420,7 @@ class PropertySet:
""" Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
"""
+ assert isinstance(ps, PropertySet)
if not self.added_.has_key(ps):
self.added_[ps] = create(self.all_ + ps.all())
return self.added_[ps]
@@ -428,6 +439,7 @@ class PropertySet:
feature = feature[0]
if not isinstance(feature, b2.build.feature.Feature):
feature = b2.build.feature.get(feature)
+ assert isinstance(feature, b2.build.feature.Feature)
if not self.feature_map_:
self.feature_map_ = {}
@@ -442,9 +454,9 @@ class PropertySet:
@cached
def get_properties(self, feature):
"""Returns all contained properties associated with 'feature'"""
-
if not isinstance(feature, b2.build.feature.Feature):
feature = b2.build.feature.get(feature)
+ assert isinstance(feature, b2.build.feature.Feature)
result = []
for p in self.all_:
@@ -454,7 +466,7 @@ class PropertySet:
def __contains__(self, item):
return item in self.all_set_
-
+
def hash(p):
m = hashlib.md5()
m.update(p)
diff --git a/tools/build/src/build/scanner.py b/tools/build/src/build/scanner.py
index 2a6bd66d74..ada5d83252 100644
--- a/tools/build/src/build/scanner.py
+++ b/tools/build/src/build/scanner.py
@@ -1,10 +1,10 @@
# Status: ported.
# Base revision: 45462
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Implements scanners: objects that compute implicit dependencies for
# files, such as includes in C++.
@@ -19,10 +19,10 @@
# then associated with actual targets. It is possible to use
# several scanners for a virtual-target. For example, a single source
# might be used by to compile actions, with different include paths.
-# In this case, two different actual targets will be created, each
+# In this case, two different actual targets will be created, each
# having scanner of its own.
#
-# Typically, scanners are created from target type and action's
+# Typically, scanners are created from target type and action's
# properties, using the rule 'get' in this module. Directly creating
# scanners is not recommended, because it might create many equvivalent
# but different instances, and lead in unneeded duplication of
@@ -34,6 +34,8 @@ import bjam
import os
from b2.exceptions import *
from b2.manager import get_manager
+from b2.util import is_iterable_typed
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
@@ -42,33 +44,37 @@ def reset ():
# Maps registered scanner classes to relevant properties
__scanners = {}
-
+
# A cache of scanners.
- # The key is: class_name.properties_tag, where properties_tag is the concatenation
+ # The key is: class_name.properties_tag, where properties_tag is the concatenation
# of all relevant properties, separated by '-'
__scanner_cache = {}
-
+
reset ()
def register(scanner_class, relevant_properties):
- """ Registers a new generator class, specifying a set of
+ """ Registers a new generator class, specifying a set of
properties relevant to this scanner. Ctor for that class
should have one parameter: list of properties.
"""
+ assert issubclass(scanner_class, Scanner)
+ assert isinstance(relevant_properties, basestring)
__scanners[str(scanner_class)] = relevant_properties
def registered(scanner_class):
""" Returns true iff a scanner of that class is registered
"""
return __scanners.has_key(str(scanner_class))
-
+
def get(scanner_class, properties):
""" Returns an instance of previously registered scanner
with the specified properties.
"""
+ assert issubclass(scanner_class, Scanner)
+ assert is_iterable_typed(properties, basestring)
scanner_name = str(scanner_class)
-
+
if not registered(scanner_name):
raise BaseException ("attempt to get unregisted scanner: %s" % scanner_name)
@@ -87,7 +93,7 @@ class Scanner:
"""
def __init__ (self):
pass
-
+
def pattern (self):
""" Returns a pattern to use for scanning.
"""
@@ -120,16 +126,19 @@ class CommonScanner(Scanner):
get_manager().scanners().propagate(self, matches)
class ScannerRegistry:
-
+
def __init__ (self, manager):
self.manager_ = manager
self.count_ = 0
self.exported_scanners_ = {}
def install (self, scanner, target, vtarget):
- """ Installs the specified scanner on actual target 'target'.
+ """ Installs the specified scanner on actual target 'target'.
vtarget: virtual target from which 'target' was actualized.
"""
+ assert isinstance(scanner, Scanner)
+ assert isinstance(target, basestring)
+ assert isinstance(vtarget, basestring)
engine = self.manager_.engine()
engine.set_target_variable(target, "HDRSCAN", scanner.pattern())
if not self.exported_scanners_.has_key(scanner):
@@ -141,8 +150,8 @@ class ScannerRegistry:
exported_name = self.exported_scanners_[scanner]
engine.set_target_variable(target, "HDRRULE", exported_name)
-
- # scanner reflects difference in properties affecting
+
+ # scanner reflects difference in properties affecting
# binding of 'target', which will be known when processing
# includes for it, will give information on how to
# interpret quoted includes.
@@ -150,6 +159,8 @@ class ScannerRegistry:
pass
def propagate(self, scanner, targets):
+ assert isinstance(scanner, Scanner)
+ assert is_iterable_typed(targets, basestring) or isinstance(targets, basestring)
engine = self.manager_.engine()
engine.set_target_variable(targets, "HDRSCAN", scanner.pattern())
engine.set_target_variable(targets, "HDRRULE",
diff --git a/tools/build/src/build/targets.py b/tools/build/src/build/targets.py
index acf10e4fdd..043d906660 100644
--- a/tools/build/src/build/targets.py
+++ b/tools/build/src/build/targets.py
@@ -10,10 +10,10 @@
# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
#
-# Abstract targets are represented by classes derived from 'AbstractTarget' class.
+# Abstract targets are represented by classes derived from 'AbstractTarget' class.
# The first abstract target is 'project_target', which is created for each
# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
-# (see project.jam).
+# (see project.jam).
#
# Project targets keep a list of 'MainTarget' instances.
# A main target is what the user explicitly defines in a Jamfile. It is
@@ -36,34 +36,34 @@
# |AbstractTarget |
# +========================+
# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project_target | | MainTarget | | BasicTarget |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project_target | | MainTarget | | BasicTarget |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
# | | TypedTarget | | make-target | | stage-target |
# . +==============+ +=============+ +==============+
@@ -81,7 +81,7 @@ import property, project, virtual_target, property_set, feature, generators, too
from virtual_target import Subvariant
from b2.exceptions import *
from b2.util.sequence import unique
-from b2.util import path, bjam_signature
+from b2.util import path, bjam_signature, safe_isinstance, is_iterable_typed
from b2.build.errors import user_error_checkpoint
import b2.build.build_request as build_request
@@ -90,7 +90,7 @@ import b2.util.set
_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
class TargetRegistry:
-
+
def __init__ (self):
# All targets that are currently being built.
# Only the key is id (target), the value is the actual object.
@@ -107,6 +107,7 @@ class TargetRegistry:
""" Registers the specified target as a main target alternatives.
Returns 'target'.
"""
+ assert isinstance(target, AbstractTarget)
target.project ().add_alternative (target)
return target
@@ -116,12 +117,15 @@ class TargetRegistry:
as main target instances, and the name of such targets are adjusted to
be '<name_of_this_target>__<name_of_source_target>'. Such renaming
is disabled is non-empty value is passed for 'no-renaming' parameter."""
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(main_target_name, basestring)
+ assert isinstance(no_renaming, (int, bool))
result = []
for t in sources:
t = b2.util.jam_to_value_maybe(t)
-
+
if isinstance (t, AbstractTarget):
name = t.name ()
@@ -131,7 +135,7 @@ class TargetRegistry:
# Inline targets are not built by default.
p = t.project()
- p.mark_targets_as_explicit([name])
+ p.mark_targets_as_explicit([name])
result.append(name)
else:
@@ -145,11 +149,12 @@ class TargetRegistry:
which are obtained by
- translating all specified property paths, and
- refining project requirements with the one specified for the target
-
+
'specification' are the properties xplicitly specified for a
main target
'project' is the project where the main taret is to be declared."""
-
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
specification.extend(toolset.requirements())
requirements = property_set.refine_from_user_input(
@@ -166,6 +171,8 @@ class TargetRegistry:
specification: Use-properties explicitly specified for a main target
project: Project where the main target is to be declared
"""
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
project_usage_requirements = project.get ('usage-requirements')
# We don't use 'refine-from-user-input' because I'm not sure if:
@@ -174,7 +181,7 @@ class TargetRegistry:
# are always free.
usage_requirements = property_set.create_from_user_input(
specification, project.project_module(), project.get("location"))
-
+
return project_usage_requirements.add (usage_requirements)
def main_target_default_build (self, specification, project):
@@ -184,6 +191,8 @@ class TargetRegistry:
specification: Default build explicitly specified for a main target
project: Project where the main target is to be declared
"""
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
if specification:
return property_set.create_with_validation(specification)
else:
@@ -192,16 +201,18 @@ class TargetRegistry:
def start_building (self, main_target_instance):
""" Helper rules to detect cycles in main target references.
"""
+ assert isinstance(main_target_instance, MainTarget)
if self.targets_being_built_.has_key(id(main_target_instance)):
names = []
for t in self.targets_being_built_.values() + [main_target_instance]:
names.append (t.full_name())
-
+
get_manager().errors()("Recursion in main target references\n")
-
+
self.targets_being_built_[id(main_target_instance)] = main_target_instance
def end_building (self, main_target_instance):
+ assert isinstance(main_target_instance, MainTarget)
assert (self.targets_being_built_.has_key (id (main_target_instance)))
del self.targets_being_built_ [id (main_target_instance)]
@@ -211,6 +222,11 @@ class TargetRegistry:
'usage_requirements' are assumed to be in the form specified
by the user in Jamfile corresponding to 'project'.
"""
+ assert isinstance(type, basestring)
+ assert isinstance(project, ProjectTarget)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
return self.main_target_alternative (TypedTarget (name, project, type,
self.main_target_sources (sources, name),
self.main_target_requirements (requirements, project),
@@ -231,6 +247,7 @@ class TargetRegistry:
print self.indent_ + message
def push_target(self, target):
+ assert isinstance(target, AbstractTarget)
self.targets_.append(target)
def pop_target(self):
@@ -241,14 +258,15 @@ class TargetRegistry:
class GenerateResult:
-
+
def __init__ (self, ur=None, targets=None):
if not targets:
targets = []
-
+ assert isinstance(ur, property_set.PropertySet) or ur is None
+ assert is_iterable_typed(targets, virtual_target.VirtualTarget)
+
self.__usage_requirements = ur
self.__targets = targets
- assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets)
if not self.__usage_requirements:
self.__usage_requirements = property_set.empty ()
@@ -258,10 +276,10 @@ class GenerateResult:
def targets (self):
return self.__targets
-
+
def extend (self, other):
assert (isinstance (other, GenerateResult))
-
+
self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ())
self.__targets.extend (other.targets ())
@@ -274,12 +292,13 @@ class AbstractTarget:
project: the project target to which this one belongs
manager:the manager object. If none, uses project.manager ()
"""
+ assert isinstance(name, basestring)
assert (isinstance (project, ProjectTarget))
# Note: it might seem that we don't need either name or project at all.
# However, there are places where we really need it. One example is error
# messages which should name problematic targets. Another is setting correct
# paths for sources and generated files.
-
+
# Why allow manager to be specified? Because otherwise project target could not derive
# from this class.
if manager:
@@ -288,47 +307,48 @@ class AbstractTarget:
self.manager_ = project.manager ()
self.name_ = name
- self.project_ = project
-
+ self.project_ = project
+
def manager (self):
return self.manager_
-
+
def name (self):
""" Returns the name of this target.
"""
return self.name_
-
+
def project (self):
""" Returns the project for this target.
"""
return self.project_
-
+
def location (self):
""" Return the location where the target was declared.
"""
return self.location_
-
+
def full_name (self):
""" Returns a user-readable name for this target.
"""
location = self.project ().get ('location')
return location + '/' + self.name_
-
+
def generate (self, property_set):
""" Takes a property set. Generates virtual targets for this abstract
target, using the specified properties, unless a different value of some
- feature is required by the target.
+ feature is required by the target.
On success, returns a GenerateResult instance with:
- a property_set with the usage requirements to be
- applied to dependents
+ applied to dependents
- a list of produced virtual targets, which may be
- empty.
+ empty.
If 'property_set' is empty, performs default build of this
target, in a way specific to derived class.
"""
raise BaseException ("method should be defined in derived classes")
-
+
def rename (self, new_name):
+ assert isinstance(new_name, basestring)
self.name_ = new_name
class ProjectTarget (AbstractTarget):
@@ -346,28 +366,32 @@ class ProjectTarget (AbstractTarget):
all alternatives are enumerated an main targets are created.
"""
def __init__ (self, manager, name, project_module, parent_project, requirements, default_build):
+ assert isinstance(project_module, basestring)
+ assert isinstance(parent_project, (ProjectTarget, type(None)))
+ assert isinstance(requirements, (type(None), property_set.PropertySet))
+ assert isinstance(default_build, (type(None), property_set.PropertySet))
AbstractTarget.__init__ (self, name, self, manager)
-
+
self.project_module_ = project_module
self.location_ = manager.projects().attribute (project_module, 'location')
self.requirements_ = requirements
self.default_build_ = default_build
-
+
self.build_dir_ = None
-
+
# A cache of IDs
self.ids_cache_ = {}
-
+
# True is main targets have already been built.
self.built_main_targets_ = False
-
+
# A list of the registered alternatives for this project.
self.alternatives_ = []
# A map from main target name to the target corresponding
# to it.
self.main_target_ = {}
-
+
# Targets marked as explicit.
self.explicit_targets_ = set()
@@ -388,8 +412,9 @@ class ProjectTarget (AbstractTarget):
# way to make 'make' work without this method.
def project_module (self):
return self.project_module_
-
+
def get (self, attribute):
+ assert isinstance(attribute, basestring)
return self.manager().projects().attribute(
self.project_module_, attribute)
@@ -404,16 +429,17 @@ class ProjectTarget (AbstractTarget):
def generate (self, ps):
""" Generates all possible targets contained in this project.
"""
+ assert isinstance(ps, property_set.PropertySet)
self.manager_.targets().log(
"Building project '%s' with '%s'" % (self.name (), str(ps)))
self.manager_.targets().increase_indent ()
-
+
result = GenerateResult ()
-
+
for t in self.targets_to_build ():
g = t.generate (ps)
result.extend (g)
-
+
self.manager_.targets().decrease_indent ()
return result
@@ -422,10 +448,10 @@ class ProjectTarget (AbstractTarget):
must be built when this project is built.
"""
result = []
-
+
if not self.built_main_targets_:
self.build_main_targets ()
-
+
# Collect all main targets here, except for "explicit" ones.
for n, t in self.main_target_.iteritems ():
if not t.name () in self.explicit_targets_:
@@ -435,29 +461,33 @@ class ProjectTarget (AbstractTarget):
self_location = self.get ('location')
for pn in self.get ('projects-to-build'):
result.append (self.find(pn + "/"))
-
+
return result
def mark_targets_as_explicit (self, target_names):
"""Add 'target' to the list of targets in this project
that should be build only by explicit request."""
-
+
# Record the name of the target, not instance, since this
# rule is called before main target instaces are created.
+ assert is_iterable_typed(target_names, basestring)
self.explicit_targets_.update(target_names)
def mark_targets_as_always(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
self.always_targets_.update(target_names)
-
+
def add_alternative (self, target_instance):
""" Add new target alternative.
"""
+ assert isinstance(target_instance, AbstractTarget)
if self.built_main_targets_:
raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())
self.alternatives_.append (target_instance)
def main_target (self, name):
+ assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets()
@@ -465,17 +495,19 @@ class ProjectTarget (AbstractTarget):
def has_main_target (self, name):
"""Tells if a main target with the specified name exists."""
+ assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets()
return self.main_target_.has_key(name)
-
+
def create_main_target (self, name):
""" Returns a 'MainTarget' class instance corresponding to the 'name'.
"""
+ assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets ()
-
+
return self.main_targets_.get (name, None)
@@ -483,7 +515,9 @@ class ProjectTarget (AbstractTarget):
""" Find and return the target with the specified id, treated
relative to self.
"""
- result = None
+ assert isinstance(id, basestring)
+
+ result = None
current_location = self.get ('location')
__re_split_project_target = re.compile (r'(.*)//(.*)')
@@ -497,13 +531,13 @@ class ProjectTarget (AbstractTarget):
target_part = split.group (2)
project_registry = self.project_.manager ().projects ()
-
+
extra_error_message = ''
if project_part:
# There's explicit project part in id. Looks up the
# project and pass the request to it.
pm = project_registry.find (project_part, current_location)
-
+
if pm:
project_target = project_registry.target (pm)
result = project_target.find (target_part, no_error=1)
@@ -520,7 +554,7 @@ class ProjectTarget (AbstractTarget):
#
# After first build we'll have target 'test' in Jamfile and file
# 'test' on the disk. We need target to override the file.
-
+
result = None
if self.has_main_target(id):
result = self.main_target(id)
@@ -531,19 +565,21 @@ class ProjectTarget (AbstractTarget):
# File actually does not exist.
# Reset 'target' so that an error is issued.
result = None
-
+
if not result:
# Interpret id as project-id
project_module = project_registry.find (id, current_location)
if project_module:
result = project_registry.target (project_module)
-
+
return result
def find (self, id, no_error = False):
+ assert isinstance(id, basestring)
+ assert isinstance(no_error, int) # also matches bools
v = self.ids_cache_.get (id, None)
-
+
if not v:
v = self.find_really (id)
self.ids_cache_ [id] = v
@@ -553,10 +589,10 @@ class ProjectTarget (AbstractTarget):
raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location')))
-
+
def build_main_targets (self):
self.built_main_targets_ = True
-
+
for a in self.alternatives_:
name = a.name ()
if not self.main_target_.has_key (name):
@@ -565,7 +601,7 @@ class ProjectTarget (AbstractTarget):
if name in self.always_targets_:
a.always()
-
+
self.main_target_ [name].add_alternative (a)
def add_constant(self, name, value, path=0):
@@ -576,17 +612,19 @@ class ProjectTarget (AbstractTarget):
the constant will be interpreted relatively
to the location of project.
"""
-
+ assert isinstance(name, basestring)
+ assert isinstance(value, basestring)
+ assert isinstance(path, int) # will also match bools
if path:
l = self.location_
if not l:
- # Project corresponding to config files do not have
+ # Project corresponding to config files do not have
# 'location' attribute, but do have source location.
# It might be more reasonable to make every project have
# a location and use some other approach to prevent buildable
# targets in config files, but that's for later.
- l = get('source-location')
-
+ l = self.get('source-location')
+
value = os.path.join(l, value)
# Now make the value absolute path. Constants should be in
# platform-native form.
@@ -596,12 +634,13 @@ class ProjectTarget (AbstractTarget):
bjam.call("set-variable", self.project_module(), name, value)
def inherit(self, parent_project):
+ assert isinstance(parent_project, ProjectTarget)
for c in parent_project.constants_:
# No need to pass the type. Path constants were converted to
# absolute paths already by parent.
self.add_constant(c, parent_project.constants_[c])
-
- # Import rules from parent
+
+ # Import rules from parent
this_module = self.project_module()
parent_module = parent_project.project_module()
@@ -612,20 +651,21 @@ class ProjectTarget (AbstractTarget):
if x not in self.manager().projects().project_rules().all_names()]
if user_rules:
bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
-
+
class MainTarget (AbstractTarget):
""" A named top-level target in Jamfile.
"""
def __init__ (self, name, project):
- AbstractTarget.__init__ (self, name, project)
+ AbstractTarget.__init__ (self, name, project)
self.alternatives_ = []
self.default_build_ = property_set.empty ()
-
+
def add_alternative (self, target):
""" Add a new alternative for this target.
"""
+ assert isinstance(target, AbstractTarget)
d = target.default_build ()
-
+
if self.alternatives_ and self.default_build_ != d:
get_manager().errors()("default build must be identical in all alternatives\n"
"main target is '%s'\n"
@@ -637,7 +677,7 @@ class MainTarget (AbstractTarget):
self.alternatives_.append (target)
- def __select_alternatives (self, property_set, debug):
+ def __select_alternatives (self, property_set_, debug):
""" Returns the best viable alternative for this property_set
See the documentation for selection rules.
# TODO: shouldn't this be 'alternative' (singular)?
@@ -647,14 +687,17 @@ class MainTarget (AbstractTarget):
# lib l : l.cpp : <variant>debug ;
# lib l : l_opt.cpp : <variant>release ;
# won't work unless we add default value <variant>debug.
- property_set = property_set.add_defaults ()
-
+ assert isinstance(property_set_, property_set.PropertySet)
+ assert isinstance(debug, int) # also matches bools
+
+ property_set_ = property_set_.add_defaults ()
+
# The algorithm: we keep the current best viable alternative.
# When we've got new best viable alternative, we compare it
- # with the current one.
+ # with the current one.
best = None
best_properties = None
-
+
if len (self.alternatives_) == 0:
return None
@@ -662,11 +705,11 @@ class MainTarget (AbstractTarget):
return self.alternatives_ [0]
if debug:
- print "Property set for selection:", property_set
+ print "Property set for selection:", property_set_
for v in self.alternatives_:
- properties = v.match (property_set, debug)
-
+ properties = v.match (property_set_, debug)
+
if properties is not None:
if not best:
best = v
@@ -689,8 +732,9 @@ class MainTarget (AbstractTarget):
return best
- def apply_default_build (self, property_set):
- return apply_default_build(property_set, self.default_build_)
+ def apply_default_build (self, property_set_):
+ assert isinstance(property_set_, property_set.PropertySet)
+ return apply_default_build(property_set_, self.default_build_)
def generate (self, ps):
""" Select an alternative for this main target, by finding all alternatives
@@ -698,23 +742,24 @@ class MainTarget (AbstractTarget):
longest requirements set.
Returns the result of calling 'generate' on that alternative.
"""
+ assert isinstance(ps, property_set.PropertySet)
self.manager_.targets ().start_building (self)
# We want composite properties in build request act as if
# all the properties it expands too are explicitly specified.
ps = ps.expand ()
-
+
all_property_sets = self.apply_default_build (ps)
result = GenerateResult ()
-
+
for p in all_property_sets:
result.extend (self.__generate_really (p))
self.manager_.targets ().end_building (self)
return result
-
+
def __generate_really (self, prop_set):
""" Generates the main target with the given property set
and returns a list which first element is property_set object
@@ -722,6 +767,7 @@ class MainTarget (AbstractTarget):
generated virtual target in other elements. It's possible
that no targets are generated.
"""
+ assert isinstance(prop_set, property_set.PropertySet)
best_alternative = self.__select_alternatives (prop_set, debug=0)
if not best_alternative:
@@ -732,24 +778,25 @@ class MainTarget (AbstractTarget):
% (self.full_name(),))
result = best_alternative.generate (prop_set)
-
+
# Now return virtual targets for the only alternative
return result
-
+
def rename(self, new_name):
+ assert isinstance(new_name, basestring)
AbstractTarget.rename(self, new_name)
for a in self.alternatives_:
a.rename(new_name)
class FileReference (AbstractTarget):
""" Abstract target which refers to a source file.
- This is artificial creature; it's usefull so that sources to
+ This is artificial creature; it's usefull so that sources to
a target can be represented as list of abstract target instances.
"""
def __init__ (self, manager, file, project):
AbstractTarget.__init__ (self, file, project)
self.file_location_ = None
-
+
def generate (self, properties):
return GenerateResult (None, [
self.manager_.virtual_targets ().from_file (
@@ -767,7 +814,7 @@ class FileReference (AbstractTarget):
# Returns the location of target. Needed by 'testing.jam'
if not self.file_location_:
source_location = self.project_.get('source-location')
-
+
for src_dir in source_location:
location = os.path.join(src_dir, self.name())
if os.path.isfile(location):
@@ -783,24 +830,26 @@ def resolve_reference(target_reference, project):
as properties explicitly specified for this reference.
"""
# Separate target name from properties override
+ assert isinstance(target_reference, basestring)
+ assert isinstance(project, ProjectTarget)
split = _re_separate_target_from_properties.match (target_reference)
if not split:
raise BaseException ("Invalid reference: '%s'" % target_reference)
-
+
id = split.group (1)
-
+
sproperties = []
-
+
if split.group (3):
sproperties = property.create_from_strings(feature.split(split.group(3)))
sproperties = feature.expand_composites(sproperties)
-
+
# Find the target
target = project.find (id)
-
+
return (target, property_set.create(sproperties))
-def generate_from_reference(target_reference, project, property_set):
+def generate_from_reference(target_reference, project, property_set_):
""" Attempts to generate the target given by target reference, which
can refer both to a main target or to a file.
Returns a list consisting of
@@ -810,13 +859,16 @@ def generate_from_reference(target_reference, project, property_set):
project: Project where the reference is made
property_set: Properties of the main target that makes the reference
"""
+ assert isinstance(target_reference, basestring)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(property_set_, property_set.PropertySet)
target, sproperties = resolve_reference(target_reference, project)
-
+
# Take properties which should be propagated and refine them
# with source-specific requirements.
- propagated = property_set.propagated()
+ propagated = property_set_.propagated()
rproperties = propagated.refine(sproperties)
-
+
return target.generate(rproperties)
@@ -828,14 +880,18 @@ class BasicTarget (AbstractTarget):
targets.
"""
def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None):
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(requirements, property_set.PropertySet)
+ assert isinstance(default_build, property_set.PropertySet)
+ assert isinstance(usage_requirements, property_set.PropertySet)
AbstractTarget.__init__ (self, name, project)
-
+
for s in sources:
if get_grist (s):
raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
-
+
self.sources_ = sources
-
+
if not requirements: requirements = property_set.empty ()
self.requirements_ = requirements
@@ -844,13 +900,13 @@ class BasicTarget (AbstractTarget):
if not usage_requirements: usage_requirements = property_set.empty ()
self.usage_requirements_ = usage_requirements
-
+
# A cache for resolved references
self.source_targets_ = None
-
+
# A cache for generated targets
self.generated_ = {}
-
+
# A cache for build requests
self.request_cache = {}
@@ -865,12 +921,12 @@ class BasicTarget (AbstractTarget):
def always(self):
self.always_ = True
-
+
def sources (self):
""" Returns the list of AbstractTargets which are used as sources.
The extra properties specified for sources are not represented.
The only used of this rule at the moment is the '--dump-tests'
- feature of the test system.
+ feature of the test system.
"""
if self.source_targets_ == None:
self.source_targets_ = []
@@ -881,7 +937,7 @@ class BasicTarget (AbstractTarget):
def requirements (self):
return self.requirements_
-
+
def default_build (self):
return self.default_build_
@@ -892,8 +948,10 @@ class BasicTarget (AbstractTarget):
"""
# For optimization, we add free unconditional requirements directly,
# without using complex algorithsm.
- # This gives the complex algorithm better chance of caching results.
+ # This gives the complex algorithm better chance of caching results.
# The exact effect of this "optimization" is no longer clear
+ assert isinstance(build_request, property_set.PropertySet)
+ assert isinstance(requirements, property_set.PropertySet)
free_unconditional = []
other = []
for p in requirements.all():
@@ -902,7 +960,7 @@ class BasicTarget (AbstractTarget):
else:
other.append(p)
other = property_set.create(other)
-
+
key = (build_request, other)
if not self.request_cache.has_key(key):
self.request_cache[key] = self.__common_properties2 (build_request, other)
@@ -910,8 +968,8 @@ class BasicTarget (AbstractTarget):
return self.request_cache[key].add_raw(free_unconditional)
# Given 'context' -- a set of already present properties, and 'requirements',
- # decide which extra properties should be applied to 'context'.
- # For conditional requirements, this means evaluating condition. For
+ # decide which extra properties should be applied to 'context'.
+ # For conditional requirements, this means evaluating condition. For
# indirect conditional requirements, this means calling a rule. Ordinary
# requirements are always applied.
#
@@ -920,20 +978,23 @@ class BasicTarget (AbstractTarget):
#
# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
#
- # If 'what' is 'refined' returns context refined with new requirements.
+ # If 'what' is 'refined' returns context refined with new requirements.
# If 'what' is 'added' returns just the requirements that must be applied.
def evaluate_requirements(self, requirements, context, what):
- # Apply non-conditional requirements.
- # It's possible that that further conditional requirement change
+ # Apply non-conditional requirements.
+ # It's possible that that further conditional requirement change
# a value set by non-conditional requirements. For example:
#
# exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
- #
+ #
# I'm not sure if this should be an error, or not, especially given that
#
- # <threading>single
+ # <threading>single
#
# might come from project's requirements.
+ assert isinstance(requirements, property_set.PropertySet)
+ assert isinstance(context, property_set.PropertySet)
+ assert isinstance(what, basestring)
unconditional = feature.expand(requirements.non_conditional())
context = context.refine(property_set.create(unconditional))
@@ -941,7 +1002,7 @@ class BasicTarget (AbstractTarget):
# We've collected properties that surely must be present in common
# properties. We now try to figure out what other properties
# should be added in order to satisfy rules (4)-(6) from the docs.
-
+
conditionals = property_set.create(requirements.conditional())
# It's supposed that #conditionals iterations
@@ -949,34 +1010,38 @@ class BasicTarget (AbstractTarget):
# direction.
max_iterations = len(conditionals.all()) +\
len(requirements.get("<conditional>")) + 1
-
+
added_requirements = []
current = context
-
+
# It's assumed that ordinary conditional requirements can't add
# <indirect-conditional> properties, and that rules referred
- # by <indirect-conditional> properties can't add new
+ # by <indirect-conditional> properties can't add new
# <indirect-conditional> properties. So the list of indirect conditionals
# does not change.
indirect = requirements.get("<conditional>")
-
+
ok = 0
for i in range(0, max_iterations):
e = conditionals.evaluate_conditionals(current).all()[:]
-
+
# Evaluate indirect conditionals.
for i in indirect:
+ new = None
i = b2.util.jam_to_value_maybe(i)
if callable(i):
# This is Python callable, yeah.
- e.extend(i(current))
+ new = i(current)
else:
# Name of bjam function. Because bjam is unable to handle
# list of Property, pass list of strings.
br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
if br:
- e.extend(property.create_from_strings(br))
+ new = property.create_from_strings(br)
+ if new:
+ new = property.translate_paths(new, self.project().location())
+ e.extend(new)
if e == added_requirements:
# If we got the same result, we've found final properties.
@@ -994,7 +1059,7 @@ class BasicTarget (AbstractTarget):
self.manager().errors()("Can't evaluate conditional properties "
+ str(conditionals))
-
+
if what == "added":
return property_set.create(unconditional + added_requirements)
elif what == "refined":
@@ -1009,57 +1074,62 @@ class BasicTarget (AbstractTarget):
# and expands to <foo2>bar2, but default value of <foo2> is not bar2,
# in which case it's not clear what to do.
#
+ assert isinstance(build_request, property_set.PropertySet)
+ assert isinstance(requirements, property_set.PropertySet)
build_request = build_request.add_defaults()
# Featured added by 'add-default' can be composite and expand
# to features without default values -- so they are not added yet.
# It could be clearer/faster to expand only newly added properties
# but that's not critical.
build_request = build_request.expand()
-
+
return self.evaluate_requirements(requirements, build_request,
"refined")
-
- def match (self, property_set, debug):
+
+ def match (self, property_set_, debug):
""" Returns the alternative condition for this alternative, if
the condition is satisfied by 'property_set'.
"""
# The condition is composed of all base non-conditional properties.
# It's not clear if we should expand 'self.requirements_' or not.
# For one thing, it would be nice to be able to put
- # <toolset>msvc-6.0
+ # <toolset>msvc-6.0
# in requirements.
- # On the other hand, if we have <variant>release in condition it
+ # On the other hand, if we have <variant>release in condition it
# does not make sense to require <optimization>full to be in
# build request just to select this variant.
+ assert isinstance(property_set_, property_set.PropertySet)
bcondition = self.requirements_.base ()
ccondition = self.requirements_.conditional ()
condition = b2.util.set.difference (bcondition, ccondition)
if debug:
print " next alternative: required properties:", [str(p) for p in condition]
-
- if b2.util.set.contains (condition, property_set.all()):
+
+ if b2.util.set.contains (condition, property_set_.all()):
if debug:
print " matched"
-
+
return condition
else:
return None
- def generate_dependency_targets (self, target_ids, property_set):
+ def generate_dependency_targets (self, target_ids, property_set_):
+ assert is_iterable_typed(target_ids, basestring)
+ assert isinstance(property_set_, property_set.PropertySet)
targets = []
usage_requirements = []
for id in target_ids:
-
- result = generate_from_reference(id, self.project_, property_set)
+
+ result = generate_from_reference(id, self.project_, property_set_)
targets += result.targets()
usage_requirements += result.usage_requirements().all()
- return (targets, usage_requirements)
-
+ return (targets, usage_requirements)
+
def generate_dependency_properties(self, properties, ps):
""" Takes a target reference, which might be either target id
or a dependency property, and generates that target using
@@ -1067,20 +1137,22 @@ class BasicTarget (AbstractTarget):
Returns a tuple (result, usage_requirements).
"""
+ assert is_iterable_typed(properties, property.Property)
+ assert isinstance(ps, property_set.PropertySet)
result_properties = []
usage_requirements = []
for p in properties:
-
+
result = generate_from_reference(p.value(), self.project_, ps)
for t in result.targets():
result_properties.append(property.Property(p.feature(), t))
-
+
usage_requirements += result.usage_requirements().all()
- return (result_properties, usage_requirements)
+ return (result_properties, usage_requirements)
+
-
@user_error_checkpoint
@@ -1089,9 +1161,10 @@ class BasicTarget (AbstractTarget):
and calls 'construct'. This method should not be
overridden.
"""
+ assert isinstance(ps, property_set.PropertySet)
self.manager_.errors().push_user_context(
"Generating target " + self.full_name(), self.user_context_)
-
+
if self.manager().targets().logging():
self.manager().targets().log(
"Building target '%s'" % self.name_)
@@ -1100,26 +1173,26 @@ class BasicTarget (AbstractTarget):
"Build request: '%s'" % str (ps.raw ()))
cf = self.manager().command_line_free_features()
self.manager().targets().log(
- "Command line free features: '%s'" % str (cf.raw ()))
+ "Command line free features: '%s'" % str (cf.raw ()))
self.manager().targets().log(
"Target requirements: %s'" % str (self.requirements().raw ()))
-
+
self.manager().targets().push_target(self)
if not self.generated_.has_key(ps):
# Apply free features form the command line. If user
- # said
+ # said
# define=FOO
# he most likely want this define to be set for all compiles.
- ps = ps.refine(self.manager().command_line_free_features())
+ ps = ps.refine(self.manager().command_line_free_features())
rproperties = self.common_properties (ps, self.requirements_)
self.manager().targets().log(
"Common properties are '%s'" % str (rproperties))
-
+
if rproperties.get("<build>") != ["no"]:
-
+
result = GenerateResult ()
properties = rproperties.non_dependency ()
@@ -1142,9 +1215,9 @@ class BasicTarget (AbstractTarget):
self.manager_.targets().log(
"Build properties: '%s'" % str(rproperties))
-
+
source_targets += rproperties.get('<source>')
-
+
# We might get duplicate sources, for example if
# we link to two library which have the same <library> in
# usage requirements.
@@ -1170,7 +1243,7 @@ class BasicTarget (AbstractTarget):
self.manager().virtual_targets().recent_targets(), ps,
source_targets, rproperties, usage_requirements)
self.manager().virtual_targets().clear_recent_targets()
-
+
ur = self.compute_usage_requirements (s)
ur = ur.add (gur)
s.set_usage_requirements (ur)
@@ -1178,7 +1251,7 @@ class BasicTarget (AbstractTarget):
self.manager_.targets().log (
"Usage requirements from '%s' are '%s'" %
(self.name(), str(rproperties)))
-
+
self.generated_[ps] = GenerateResult (ur, result)
else:
self.generated_[ps] = GenerateResult (property_set.empty(), [])
@@ -1195,7 +1268,7 @@ class BasicTarget (AbstractTarget):
# dependencies
# - it's not clear if that's a good idea anyway. The alias
# target, for example, should not fail to build if a dependency
- # fails.
+ # fails.
self.generated_[ps] = GenerateResult(
property_set.create(["<build>no"]), [])
else:
@@ -1205,21 +1278,22 @@ class BasicTarget (AbstractTarget):
self.manager().targets().decrease_indent()
return self.generated_[ps]
-
+
def compute_usage_requirements (self, subvariant):
- """ Given the set of generated targets, and refined build
+ """ Given the set of generated targets, and refined build
properties, determines and sets appripriate usage requirements
on those targets.
"""
+ assert isinstance(subvariant, virtual_target.Subvariant)
rproperties = subvariant.build_properties ()
xusage_requirements =self.evaluate_requirements(
self.usage_requirements_, rproperties, "added")
-
+
# We generate all dependency properties and add them,
# as well as their usage requirements, to result.
(r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
extra = r1 + r2
-
+
result = property_set.create (xusage_requirements.non_dependency () + extra)
# Propagate usage requirements we've got from sources, except
@@ -1230,7 +1304,7 @@ class BasicTarget (AbstractTarget):
#
# pch pch1 : ...
# lib lib1 : ..... pch1 ;
- # pch pch2 :
+ # pch pch2 :
# lib lib2 : pch2 lib1 ;
#
# Here, lib2 should not get <pch-header> property from pch1.
@@ -1241,7 +1315,7 @@ class BasicTarget (AbstractTarget):
# features are special.
removed_pch = filter(lambda prop: prop.feature().name() not in ['<pch-header>', '<pch-file>'], subvariant.sources_usage_requirements().all())
result = result.add(property_set.PropertySet(removed_pch))
-
+
return result
def create_subvariant (self, root_targets, all_targets,
@@ -1249,23 +1323,29 @@ class BasicTarget (AbstractTarget):
rproperties, usage_requirements):
"""Creates a new subvariant-dg instances for 'targets'
- 'root-targets' the virtual targets will be returned to dependents
- - 'all-targets' all virtual
+ - 'all-targets' all virtual
targets created while building this main target
- 'build-request' is property-set instance with
requested build properties"""
-
+ assert is_iterable_typed(root_targets, virtual_target.VirtualTarget)
+ assert is_iterable_typed(all_targets, virtual_target.VirtualTarget)
+ assert isinstance(build_request, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(rproperties, property_set.PropertySet)
+ assert isinstance(usage_requirements, property_set.PropertySet)
+
for e in root_targets:
e.root (True)
s = Subvariant (self, build_request, sources,
rproperties, usage_requirements, all_targets)
-
+
for v in all_targets:
if not v.creating_subvariant():
v.creating_subvariant(s)
-
+
return s
-
+
def construct (self, name, source_targets, properties):
""" Constructs the virtual targets for this abstract targets and
the dependecy graph. Returns a tuple consisting of the properties and the list of virtual targets.
@@ -1276,21 +1356,24 @@ class BasicTarget (AbstractTarget):
class TypedTarget (BasicTarget):
import generators
-
+
def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
+ assert isinstance(type, basestring)
BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
self.type_ = type
def __jam_repr__(self):
return b2.util.value_to_jam(self)
-
+
def type (self):
return self.type_
-
- def construct (self, name, source_targets, prop_set):
+ def construct (self, name, source_targets, prop_set):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
r = generators.construct (self.project_, os.path.splitext(name)[0],
- self.type_,
+ self.type_,
prop_set.add_raw(['<main-target-type>' + self.type_]),
source_targets, True)
@@ -1303,17 +1386,19 @@ class TypedTarget (BasicTarget):
print "error: and the requested properties"
print "error: make sure you've configured the needed tools"
print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
-
+
print "To debug this problem, try the --debug-generators option."
sys.exit(1)
-
+
return r
-def apply_default_build(property_set, default_build):
+def apply_default_build(property_set_, default_build):
# 1. First, see what properties from default_build
- # are already present in property_set.
+ # are already present in property_set.
+ assert isinstance(property_set_, property_set.PropertySet)
+ assert isinstance(default_build, property_set.PropertySet)
- specified_features = set(p.feature() for p in property_set.all())
+ specified_features = set(p.feature() for p in property_set_.all())
defaults_to_apply = []
for d in default_build.all():
@@ -1341,24 +1426,29 @@ def apply_default_build(property_set, default_build):
# be an indication that
# build_request.expand-no-defaults is the wrong rule
# to use here.
- compressed = feature.compress_subproperties(property_set.all())
+ compressed = feature.compress_subproperties(property_set_.all())
result = build_request.expand_no_defaults(
b2.build.property_set.create(feature.expand([p])) for p in (compressed + defaults_to_apply))
else:
- result.append (property_set)
+ result.append (property_set_)
return result
def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
-
+ assert isinstance(name, basestring)
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
+
from b2.manager import get_manager
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
TypedTarget(name, project, type,
t.main_target_sources(sources, name),
@@ -1368,17 +1458,22 @@ def create_typed_metatarget(name, type, sources, requirements, default_build, us
def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
from b2.manager import get_manager
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
klass(name, project,
t.main_target_sources(sources, name),
t.main_target_requirements(requirements, project),
t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
+ t.main_target_usage_requirements(usage_requirements, project)))
def metatarget_function_for_class(class_):
@@ -1390,7 +1485,7 @@ def metatarget_function_for_class(class_):
t = get_manager().targets()
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
class_(name, project,
t.main_target_sources(sources, name),
diff --git a/tools/build/src/build/toolset.py b/tools/build/src/build/toolset.py
index e969123d44..672d18f5ad 100644
--- a/tools/build/src/build/toolset.py
+++ b/tools/build/src/build/toolset.py
@@ -1,18 +1,20 @@
# Status: being ported by Vladimir Prus
# Base revision: 40958
#
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
""" Support for toolset definition.
"""
import feature, property, generators, property_set
import b2.util.set
-from b2.util import cached, qualify_jam_action
+import bjam
+
+from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable
from b2.util.utility import *
from b2.util import bjam_signature
from b2.manager import get_manager
@@ -22,7 +24,7 @@ __re_two_ampersands = re.compile ('(&&)')
__re_first_segment = re.compile ('([^.]*).*')
__re_first_group = re.compile (r'[^.]*\.(.*)')
-# Flag is a mechanism to set a value
+# Flag is a mechanism to set a value
# A single toolset flag. Specifies that when certain
# properties are in build property set, certain values
# should be appended to some variable.
@@ -30,13 +32,18 @@ __re_first_group = re.compile (r'[^.]*\.(.*)')
# A flag applies to a specific action in specific module.
# The list of all flags for a module is stored, and each
# flag further contains the name of the rule it applies
-# for,
+# for,
class Flag:
def __init__(self, variable_name, values, condition, rule = None):
+ assert isinstance(variable_name, basestring)
+ assert is_iterable(values) and all(
+ isinstance(v, (basestring, type(None))) for v in values)
+ assert is_iterable_typed(condition, property_set.PropertySet)
+ assert isinstance(rule, (basestring, type(None)))
self.variable_name = variable_name
self.values = values
- self.condition = condition
+ self.condition = condition
self.rule = rule
def __str__(self):
@@ -47,7 +54,7 @@ def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __module_flags, __flags, __stv
-
+
# Mapping from module name to a list of all flags that apply
# to either that module directly, or to any rule in that module.
# Each element of the list is Flag instance.
@@ -61,21 +68,21 @@ def reset ():
# entries for module name 'xxx', they are flags for 'xxx' itself,
# not including any rules in that module.
__flags = {}
-
+
# A cache for varaible settings. The key is generated from the rule name and the properties.
__stv = {}
-
+
reset ()
# FIXME: --ignore-toolset-requirements
def using(toolset_module, *args):
loaded_toolset_module= get_manager().projects().load_module(toolset_module, [os.getcwd()]);
loaded_toolset_module.init(*args)
-
+
# FIXME push-checking-for-flags-module ....
# FIXME: investigate existing uses of 'hack-hack' parameter
# in jam code.
-
+
@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
["values", "*"]))
def flags(rule_or_module, variable_name, condition, values = []):
@@ -84,7 +91,7 @@ def flags(rule_or_module, variable_name, condition, values = []):
rule_or_module: If contains dot, should be a rule name.
The flags will be applied when that rule is
used to set up build actions.
-
+
If does not contain dot, should be a module name.
The flags will be applied for all rules in that
module.
@@ -92,7 +99,7 @@ def flags(rule_or_module, variable_name, condition, values = []):
module, an error is issued.
variable_name: Variable that should be set on target
-
+
condition A condition when this flag should be applied.
Should be set of property sets. If one of
those property sets is contained in build
@@ -102,21 +109,25 @@ def flags(rule_or_module, variable_name, condition, values = []):
"gcc". Subfeatures, like in "<toolset>gcc-3.2"
are allowed. If left empty, the flag will
always used.
-
- Propery sets may use value-less properties
- ('<a>' vs. '<a>value') to match absent
+
+ Propery sets may use value-less properties
+ ('<a>' vs. '<a>value') to match absent
properties. This allows to separately match
-
+
<architecture>/<address-model>64
<architecture>ia64/<address-model>
-
+
Where both features are optional. Without this
syntax we'd be forced to define "default" value.
values: The value to add to variable. If <feature>
- is specified, then the value of 'feature'
+ is specified, then the value of 'feature'
will be added.
"""
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(variable_name, basestring)
+ assert is_iterable_typed(condition, basestring)
+ assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values)
caller = bjam.caller()
if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
# Unqualified rule name, used inside Jamfile. Most likely used with
@@ -129,17 +140,17 @@ def flags(rule_or_module, variable_name, condition, values = []):
# FIXME: revive checking that we don't set flags for a different
# module unintentionally
pass
-
+
if condition and not replace_grist (condition, ''):
# We have condition in the form '<feature>', that is, without
# value. That's a previous syntax:
#
# flags gcc.link RPATH <dll-path> ;
# for compatibility, convert it to
- # flags gcc.link RPATH : <dll-path> ;
+ # flags gcc.link RPATH : <dll-path> ;
values = [ condition ]
condition = None
-
+
if condition:
transformed = []
for c in condition:
@@ -150,14 +161,17 @@ def flags(rule_or_module, variable_name, condition, values = []):
condition = transformed
property.validate_property_sets(condition)
-
+
__add_flag (rule_or_module, variable_name, condition, values)
def set_target_variables (manager, rule_or_module, targets, ps):
"""
"""
+ assert isinstance(rule_or_module, basestring)
+ assert is_iterable_typed(targets, basestring)
+ assert isinstance(ps, property_set.PropertySet)
settings = __set_target_variables_aux(manager, rule_or_module, ps)
-
+
if settings:
for s in settings:
for target in targets:
@@ -166,7 +180,8 @@ def set_target_variables (manager, rule_or_module, targets, ps):
def find_satisfied_condition(conditions, ps):
"""Returns the first element of 'property-sets' which is a subset of
'properties', or an empty list if no such element exists."""
-
+ assert is_iterable_typed(conditions, property_set.PropertySet)
+ assert isinstance(ps, property_set.PropertySet)
features = set(p.feature() for p in ps.all())
for condition in conditions:
@@ -177,11 +192,11 @@ def find_satisfied_condition(conditions, ps):
found = False
if i.value():
found = i.value() in ps.get(i.feature())
- else:
- # Handle value-less properties like '<architecture>' (compare with
+ else:
+ # Handle value-less properties like '<architecture>' (compare with
# '<architecture>x86').
- # If $(i) is a value-less property it should match default
- # value of an optional property. See the first line in the
+ # If $(i) is a value-less property it should match default
+ # value of an optional property. See the first line in the
# example below:
#
# property set properties result
@@ -197,22 +212,27 @@ def find_satisfied_condition(conditions, ps):
return condition
return None
-
+
def register (toolset):
""" Registers a new toolset.
"""
+ assert isinstance(toolset, basestring)
feature.extend('toolset', [toolset])
def inherit_generators (toolset, properties, base, generators_to_ignore = []):
+ assert isinstance(toolset, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(base, basestring)
+ assert is_iterable_typed(generators_to_ignore, basestring)
if not properties:
properties = [replace_grist (toolset, '<toolset>')]
-
+
base_generators = generators.generators_for_toolset(base)
-
+
for g in base_generators:
id = g.id()
-
+
if not id in generators_to_ignore:
# Some generator names have multiple periods in their name, so
# $(id:B=$(toolset)) doesn't generate the right new_id name.
@@ -232,13 +252,16 @@ def inherit_flags(toolset, base, prohibited_properties = []):
'prohibited-properties' are ignored. Don't confuse property and feature, for
example <debug-symbols>on and <debug-symbols>off, so blocking one of them does
not block the other one.
-
+
The flag conditions are not altered at all, so if a condition includes a name,
or version of a base toolset, it won't ever match the inheriting toolset. When
such flag settings must be inherited, define a rule in base toolset module and
call it as needed."""
+ assert isinstance(toolset, basestring)
+ assert isinstance(base, basestring)
+ assert is_iterable_typed(prohibited_properties, basestring)
for f in __module_flags.get(base, []):
-
+
if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
match = __re_first_group.match(f.rule)
rule_ = None
@@ -254,38 +277,20 @@ def inherit_flags(toolset, base, prohibited_properties = []):
__add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
-def inherit_rules (toolset, base):
- pass
- # FIXME: do something about this.
-# base_generators = generators.generators_for_toolset (base)
-# import action
+def inherit_rules(toolset, base):
+ engine = get_manager().engine()
+ new_actions = {}
+ for action_name, action in engine.actions.iteritems():
+ module, id = split_action_id(action_name)
+ if module == base:
+ new_action_name = toolset + '.' + id
+ # make sure not to override any existing actions
+ # that may have been declared already
+ if new_action_name not in engine.actions:
+ new_actions[new_action_name] = action
-# ids = []
-# for g in base_generators:
-# (old_toolset, id) = split_action_id (g.id ())
-# ids.append (id) ;
-
-# new_actions = []
-
-# engine = get_manager().engine()
- # FIXME: do this!
-# for action in engine.action.values():
-# pass
-# (old_toolset, id) = split_action_id(action.action_name)
-#
-# if old_toolset == base:
-# new_actions.append ((id, value [0], value [1]))
-#
-# for a in new_actions:
-# action.register (toolset + '.' + a [0], a [1], a [2])
-
- # TODO: how to deal with this?
-# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ;
-# # Import the rules to the global scope
-# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
-# }
-#
+ engine.actions.update(new_actions)
######################################################################################
# Private functions
@@ -294,12 +299,14 @@ def inherit_rules (toolset, base):
def __set_target_variables_aux (manager, rule_or_module, ps):
""" Given a rule name and a property set, returns a list of tuples of
variables names and values, which must be set on targets for that
- rule/properties combination.
+ rule/properties combination.
"""
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(ps, property_set.PropertySet)
result = []
for f in __flags.get(rule_or_module, []):
-
+
if not f.condition or find_satisfied_condition (f.condition, ps):
processed = []
for v in f.values:
@@ -309,10 +316,10 @@ def __set_target_variables_aux (manager, rule_or_module, ps):
for r in processed:
result.append ((f.variable_name, r))
-
+
# strip away last dot separated part and recurse.
next = __re_split_last_segment.match(rule_or_module)
-
+
if next:
result.extend(__set_target_variables_aux(
manager, next.group(1), ps))
@@ -320,12 +327,14 @@ def __set_target_variables_aux (manager, rule_or_module, ps):
return result
def __handle_flag_value (manager, value, ps):
+ assert isinstance(value, basestring)
+ assert isinstance(ps, property_set.PropertySet)
result = []
-
+
if get_grist (value):
f = feature.get(value)
values = ps.get(f)
-
+
for value in values:
if f.dependency():
@@ -334,7 +343,7 @@ def __handle_flag_value (manager, value, ps):
result.append(value.actualize())
elif f.path() or f.free():
-
+
# Treat features with && in the value
# specially -- each &&-separated element is considered
# separate value. This is needed to handle searched
@@ -355,8 +364,13 @@ def __add_flag (rule_or_module, variable_name, condition, values):
""" Adds a new flag setting with the specified values.
Does no checking.
"""
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(variable_name, basestring)
+ assert is_iterable_typed(condition, property_set.PropertySet)
+ assert is_iterable(values) and all(
+ isinstance(v, (basestring, type(None))) for v in values)
f = Flag(variable_name, values, condition, rule_or_module)
-
+
# Grab the name of the module
m = __re_first_segment.match (rule_or_module)
assert m
@@ -377,21 +391,24 @@ def add_requirements(requirements):
will be automatically added to the requirements for all main targets, as if
they were specified literally. For best results, all requirements added should
be conditional or indirect conditional."""
-
+ assert is_iterable_typed(requirements, basestring)
+
#if ! $(.ignore-requirements)
#{
__requirements.extend(requirements)
#}
-
+
# Make toolset 'toolset', defined in a module of the same name,
# inherit from 'base'
# 1. The 'init' rule from 'base' is imported into 'toolset' with full
# name. Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and
+# 2. All generators from 'base' are cloned. The ids are adjusted and
# <toolset> property in requires is adjusted too
# 3. All flags are inherited
# 4. All rules are imported.
def inherit(toolset, base):
+ assert isinstance(toolset, basestring)
+ assert isinstance(base, basestring)
get_manager().projects().load_module(base, []);
inherit_generators(toolset, [], base)
diff --git a/tools/build/src/build/type.py b/tools/build/src/build/type.py
index e815739f40..c8d6334c79 100644
--- a/tools/build/src/build/type.py
+++ b/tools/build/src/build/type.py
@@ -14,7 +14,7 @@ import os.path
from b2.util.utility import replace_grist, os_name
from b2.exceptions import *
from b2.build import feature, property, scanner
-from b2.util import bjam_signature
+from b2.util import bjam_signature, is_iterable_typed
__re_hyphen = re.compile ('-')
@@ -32,17 +32,17 @@ def __register_features ():
def reset ():
""" Clear the module state. This is mainly for testing purposes.
Note that this must be called _after_ resetting the module 'feature'.
- """
+ """
global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache
-
+
__register_features ()
# Stores suffixes for generated targets.
__prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()]
-
+
# Maps suffixes to types
__suffixes_to_types = {}
-
+
# A map with all the registered types, indexed by the type name
# Each entry is a dictionary with following values:
# 'base': the name of base type or None if type has no base
@@ -52,12 +52,12 @@ def reset ():
# Caches suffixes for targets with certain properties.
__target_suffixes_cache = {}
-
+
reset ()
@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"]))
def register (type, suffixes = [], base_type = None):
- """ Registers a target type, possibly derived from a 'base-type'.
+ """ Registers a target type, possibly derived from a 'base-type'.
If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'.
Also, the first element gives the suffix to be used when constructing and object of
'type'.
@@ -70,7 +70,7 @@ def register (type, suffixes = [], base_type = None):
# which need to be decomposed.
if __re_hyphen.search (type):
raise BaseException ('type name "%s" contains a hyphen' % type)
-
+
if __types.has_key (type):
raise BaseException ('Type "%s" is already registered.' % type)
@@ -79,7 +79,7 @@ def register (type, suffixes = [], base_type = None):
entry ['derived'] = []
entry ['scanner'] = None
__types [type] = entry
-
+
if base_type:
__types.setdefault(base_type, {}).setdefault('derived', []).append(type)
@@ -87,17 +87,17 @@ def register (type, suffixes = [], base_type = None):
# Generated targets of 'type' will use the first of 'suffixes'
# (this may be overriden)
set_generated_target_suffix (type, [], suffixes [0])
-
+
# Specify mapping from suffixes to type
register_suffixes (suffixes, type)
-
+
feature.extend('target-type', [type])
feature.extend('main-target-type', [type])
feature.extend('base-target-type', [type])
if base_type:
- feature.compose ('<target-type>' + type, replace_grist (base_type, '<base-target-type>'))
- feature.compose ('<base-target-type>' + type, '<base-target-type>' + base_type)
+ feature.compose ('<target-type>' + type, [replace_grist (base_type, '<base-target-type>')])
+ feature.compose ('<base-target-type>' + type, ['<base-target-type>' + base_type])
import b2.build.generators as generators
# Adding a new derived type affects generator selection so we need to
@@ -111,13 +111,16 @@ def register (type, suffixes = [], base_type = None):
# FIXME: quick hack.
def type_from_rule_name(rule_name):
+ assert isinstance(rule_name, basestring)
return rule_name.upper().replace("-", "_")
def register_suffixes (suffixes, type):
- """ Specifies that targets with suffix from 'suffixes' have the type 'type'.
+ """ Specifies that targets with suffix from 'suffixes' have the type 'type'.
If a different type is already specified for any of syffixes, issues an error.
"""
+ assert is_iterable_typed(suffixes, basestring)
+ assert isinstance(type, basestring)
for s in suffixes:
if __suffixes_to_types.has_key (s):
old_type = __suffixes_to_types [s]
@@ -129,40 +132,51 @@ def register_suffixes (suffixes, type):
def registered (type):
""" Returns true iff type has been registered.
"""
+ assert isinstance(type, basestring)
return __types.has_key (type)
def validate (type):
""" Issues an error if 'type' is unknown.
"""
+ assert isinstance(type, basestring)
if not registered (type):
raise BaseException ("Unknown target type '%s'" % type)
def set_scanner (type, scanner):
""" Sets a scanner class that will be used for this 'type'.
"""
+ if __debug__:
+ from .scanner import Scanner
+ assert isinstance(type, basestring)
+ assert issubclass(scanner, Scanner)
validate (type)
__types [type]['scanner'] = scanner
def get_scanner (type, prop_set):
""" Returns a scanner instance appropriate to 'type' and 'property_set'.
"""
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(prop_set, PropertySet)
if registered (type):
scanner_type = __types [type]['scanner']
if scanner_type:
return scanner.get (scanner_type, prop_set.raw ())
pass
-
+
return None
def base(type):
"""Returns a base type for the given type or nothing in case the given type is
not derived."""
-
+ assert isinstance(type, basestring)
return __types[type]['base']
def all_bases (type):
""" Returns type and all of its bases, in the order of their distance from type.
"""
+ assert isinstance(type, basestring)
result = []
while type:
result.append (type)
@@ -173,6 +187,7 @@ def all_bases (type):
def all_derived (type):
""" Returns type and all classes that derive from it, in the order of their distance from type.
"""
+ assert isinstance(type, basestring)
result = [type]
for d in __types [type]['derived']:
result.extend (all_derived (d))
@@ -182,21 +197,25 @@ def all_derived (type):
def is_derived (type, base):
""" Returns true if 'type' is 'base' or has 'base' as its direct or indirect base.
"""
+ assert isinstance(type, basestring)
+ assert isinstance(base, basestring)
# TODO: this isn't very efficient, especially for bases close to type
if base in all_bases (type):
return True
- else:
+ else:
return False
def is_subtype (type, base):
""" Same as is_derived. Should be removed.
"""
+ assert isinstance(type, basestring)
+ assert isinstance(base, basestring)
# TODO: remove this method
return is_derived (type, base)
@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
def set_generated_target_suffix (type, properties, suffix):
- """ Sets a target suffix that should be used when generating target
+ """ Sets a target suffix that should be used when generating target
of 'type' with the specified properties. Can be called with
empty properties if no suffix for 'type' was specified yet.
This does not automatically specify that files 'suffix' have
@@ -208,17 +227,27 @@ def set_generated_target_suffix (type, properties, suffix):
The 'suffix' parameter can be empty string ("") to indicate that
no suffix should be used.
"""
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(suffix, basestring)
set_generated_target_ps(1, type, properties, suffix)
-
+
def change_generated_target_suffix (type, properties, suffix):
- """ Change the suffix previously registered for this type/properties
+ """ Change the suffix previously registered for this type/properties
combination. If suffix is not yet specified, sets it.
"""
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(suffix, basestring)
change_generated_target_ps(1, type, properties, suffix)
def generated_target_suffix(type, properties):
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(properties, PropertySet)
return generated_target_ps(1, type, properties)
# Sets a target prefix that should be used when generating targets of 'type'
@@ -236,16 +265,31 @@ def set_generated_target_prefix(type, properties, prefix):
# Change the prefix previously registered for this type/properties combination.
# If prefix is not yet specified, sets it.
def change_generated_target_prefix(type, properties, prefix):
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(prefix, basestring)
change_generated_target_ps(0, type, properties, prefix)
def generated_target_prefix(type, properties):
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(properties, PropertySet)
return generated_target_ps(0, type, properties)
def set_generated_target_ps(is_suffix, type, properties, val):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(val, basestring)
properties.append ('<target-type>' + type)
__prefixes_suffixes[is_suffix].insert (properties, val)
def change_generated_target_ps(is_suffix, type, properties, val):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(val, basestring)
properties.append ('<target-type>' + type)
prev = __prefixes_suffixes[is_suffix].find_replace(properties, val)
if not prev:
@@ -256,7 +300,9 @@ def change_generated_target_ps(is_suffix, type, properties, val):
# If no prefix/suffix is specified for 'type', returns prefix/suffix for
# base type, if any.
def generated_target_ps_real(is_suffix, type, properties):
-
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
result = ''
found = False
while type and not found:
@@ -278,6 +324,11 @@ def generated_target_ps(is_suffix, type, prop_set):
with the specified properties. If not suffix were specified for
'type', returns suffix for base type, if any.
"""
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert isinstance(prop_set, PropertySet)
key = (is_suffix, type, prop_set)
v = __target_suffixes_cache.get(key, None)
@@ -289,14 +340,15 @@ def generated_target_ps(is_suffix, type, prop_set):
def type(filename):
""" Returns file type given it's name. If there are several dots in filename,
- tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
+ tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
"so" will be tried.
"""
+ assert isinstance(filename, basestring)
while 1:
filename, suffix = os.path.splitext (filename)
if not suffix: return None
suffix = suffix[1:]
-
+
if __suffixes_to_types.has_key(suffix):
return __suffixes_to_types[suffix]
@@ -306,6 +358,10 @@ def register_type (type, suffixes, base_type = None, os = []):
if os is not specified. This rule is injected into each of the type
modules for the sake of convenience.
"""
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(suffixes, basestring)
+ assert isinstance(base_type, basestring) or base_type is None
+ assert is_iterable_typed(os, basestring)
if registered (type):
return
diff --git a/tools/build/src/build/virtual_target.py b/tools/build/src/build/virtual_target.py
index ac6703056b..ea4b24d820 100644
--- a/tools/build/src/build/virtual_target.py
+++ b/tools/build/src/build/virtual_target.py
@@ -67,7 +67,7 @@ import os.path
import string
import types
-from b2.util import path, utility, set
+from b2.util import path, utility, set, is_iterable_typed
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
from b2.util.sequence import unique
from b2.tools import common
@@ -110,6 +110,7 @@ class VirtualTargetRegistry:
and equal action. If such target is found it is retured and 'target' is not registered.
Otherwise, 'target' is registered and returned.
"""
+ assert isinstance(target, VirtualTarget)
if target.path():
signature = target.path() + "-" + target.name()
else:
@@ -156,6 +157,11 @@ class VirtualTargetRegistry:
for the project, and use that path to determine if the target was already created.
TODO: passing project with all virtual targets starts to be annoying.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(file, basestring)
+ assert isinstance(file_location, basestring)
+ assert isinstance(project, ProjectTarget)
# Check if we've created a target corresponding to this file.
path = os.path.join(os.getcwd(), file_location, file)
path = os.path.normpath(path)
@@ -192,6 +198,8 @@ class VirtualTargetRegistry:
return [t for t in targets if b2.build.type.is_sybtype(t.type(), type)]
def register_actual_name (self, actual_name, virtual_target):
+ assert isinstance(actual_name, basestring)
+ assert isinstance(virtual_target, VirtualTarget)
if self.actual_.has_key (actual_name):
cs1 = self.actual_ [actual_name].creating_subvariant ()
cs2 = virtual_target.creating_subvariant ()
@@ -238,6 +246,9 @@ class VirtualTargetRegistry:
""" Appends the suffix appropriate to 'type/property_set' combination
to the specified name and returns the result.
"""
+ assert isinstance(specified_name, basestring)
+ assert isinstance(file_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
if suffix:
@@ -254,6 +265,10 @@ class VirtualTarget:
project: project to which this target belongs.
"""
def __init__ (self, name, project):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(name, basestring)
+ assert isinstance(project, ProjectTarget)
self.name_ = name
self.project_ = project
self.dependencies_ = []
@@ -302,6 +317,9 @@ class VirtualTarget:
If scanner is not specified, then actual target is returned.
"""
+ if __debug__:
+ from .scanner import Scanner
+ assert scanner is None or isinstance(scanner, Scanner)
actual_name = self.actualize_no_scanner ()
if self.always_:
@@ -373,6 +391,9 @@ class AbstractFileTarget (VirtualTarget):
type: optional type of this target.
"""
def __init__ (self, name, type, project, action = None, exact=False):
+ assert isinstance(type, basestring) or type is None
+ assert action is None or isinstance(action, Action)
+ assert isinstance(exact, (int, bool))
VirtualTarget.__init__ (self, name, project)
self.type_ = type
@@ -402,6 +423,7 @@ class AbstractFileTarget (VirtualTarget):
""" Sets the path. When generating target name, it will override any path
computation from properties.
"""
+ assert isinstance(path, basestring)
self.path_ = os.path.normpath(path)
def action (self):
@@ -413,6 +435,7 @@ class AbstractFileTarget (VirtualTarget):
""" Sets/gets the 'root' flag. Target is root is it directly correspods to some
variant of a main target.
"""
+ assert isinstance(set, (int, bool, type(None)))
if set:
self.root_ = True
return self.root_
@@ -425,6 +448,7 @@ class AbstractFileTarget (VirtualTarget):
s: If specified, specified the value to set,
which should be instance of 'subvariant' class.
"""
+ assert s is None or isinstance(s, Subvariant)
if s and not self.creating_subvariant ():
if self.creating_subvariant ():
raise BaseException ("Attempt to change 'dg'")
@@ -435,6 +459,7 @@ class AbstractFileTarget (VirtualTarget):
return self.creating_subvariant_
def actualize_action (self, target):
+ assert isinstance(target, basestring)
if self.action_:
self.action_.actualize ()
@@ -513,7 +538,7 @@ class AbstractFileTarget (VirtualTarget):
If not <tag> property is specified, or the rule specified by
<tag> returns nothing, returns the result of calling
virtual-target.add-suffix"""
-
+ assert isinstance(specified_name, basestring)
if self.action_:
ps = self.action_.properties()
else:
@@ -627,6 +652,9 @@ class FileTarget (AbstractFileTarget):
- the suffix which correspond to the target's type.
"""
def __init__ (self, name, type, project, action = None, path=None, exact=False):
+ assert isinstance(type, basestring) or type is None
+ assert action is None or isinstance(action, Action)
+ assert isinstance(exact, (int, bool))
AbstractFileTarget.__init__ (self, name, type, project, action, exact)
self.path_ = path
@@ -638,10 +666,12 @@ class FileTarget (AbstractFileTarget):
return self.name_
def clone_with_different_type(self, new_type):
+ assert isinstance(new_type, basestring)
return FileTarget(self.name_, new_type, self.project_,
self.action_, self.path_, exact=True)
def actualize_location (self, target):
+ assert isinstance(target, basestring)
engine = self.project_.manager_.engine ()
if self.action_:
@@ -714,6 +744,7 @@ class FileTarget (AbstractFileTarget):
class NotFileTarget(AbstractFileTarget):
def __init__(self, name, project, action):
+ assert isinstance(action, Action)
AbstractFileTarget.__init__(self, name, None, project, action)
def path(self):
@@ -721,6 +752,7 @@ class NotFileTarget(AbstractFileTarget):
return None
def actualize_location(self, target):
+ assert isinstance(target, basestring)
bjam.call("NOTFILE", target)
bjam.call("ALWAYS", target)
bjam.call("NOUPDATE", target)
@@ -735,8 +767,9 @@ class Action:
not establish dependency relationship, but should do everything else.
"""
def __init__ (self, manager, sources, action_name, prop_set):
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(action_name, basestring) or action_name is None
assert(isinstance(prop_set, property_set.PropertySet))
- assert type(sources) == types.ListType
self.sources_ = sources
self.action_name_ = action_name
if not prop_set:
@@ -758,11 +791,14 @@ class Action:
def add_targets (self, targets):
+ assert is_iterable_typed(targets, VirtualTarget)
self.targets_ += targets
- def replace_targets (old_targets, new_targets):
- self.targets_ = [t for t in targets if not t in old_targets] + new_targets
+ def replace_targets(self, old_targets, new_targets):
+ assert is_iterable_typed(old_targets, VirtualTarget)
+ assert is_iterable_typed(new_targets, VirtualTarget)
+ self.targets_ = [t for t in self.targets_ if not t in old_targets] + new_targets
def targets (self):
return self.targets_
@@ -826,6 +862,8 @@ class Action:
For each passed source, actualizes it with the appropriate scanner.
Returns the actualized virtual targets.
"""
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
result = []
for i in sources:
scanner = None
@@ -852,6 +890,8 @@ class Action:
New values will be *appended* to the variables. They may be non-empty,
if caller wants it.
"""
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
dependencies = self.properties_.get ('<dependency>')
self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
@@ -879,6 +919,7 @@ class Action:
to get generated headers correctly. Default implementation returns
its argument.
"""
+ assert isinstance(prop_set, property_set.PropertySet)
return prop_set
@@ -889,6 +930,7 @@ class NullAction (Action):
actions which create them.
"""
def __init__ (self, manager, prop_set):
+ assert isinstance(prop_set, property_set.PropertySet)
Action.__init__ (self, manager, [], None, prop_set)
def actualize (self):
@@ -908,7 +950,8 @@ class NonScanningAction(Action):
Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
def actualize_source_type(self, sources, property_set):
-
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(property_set, property_set.PropertySet)
result = []
for s in sources:
result.append(s.actualize())
@@ -920,6 +963,9 @@ def traverse (target, include_roots = False, include_sources = False):
found during traversal, it's either included or not, dependencing of the
value of 'include_roots'. In either case, sources of root are not traversed.
"""
+ assert isinstance(target, VirtualTarget)
+ assert isinstance(include_roots, (int, bool))
+ assert isinstance(include_sources, (int, bool))
result = []
if target.action ():
@@ -951,7 +997,12 @@ def clone_action (action, new_project, new_action_name, new_properties):
and all produced target. The rule-name and properties are set
to 'new-rule-name' and 'new-properties', if those are specified.
Returns the cloned action."""
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(action, Action)
+ assert isinstance(new_project, ProjectTarget)
+ assert isinstance(new_action_name, basestring)
+ assert isinstance(new_properties, property_set.PropertySet)
if not new_action_name:
new_action_name = action.action_name()
@@ -990,6 +1041,14 @@ class Subvariant:
sources_usage_requirements: Properties propagated from sources
created_targets: Top-level created targets
"""
+ if __debug__:
+ from .targets import AbstractTarget
+ assert isinstance(main_target, AbstractTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(build_properties, property_set.PropertySet)
+ assert isinstance(sources_usage_requirements, property_set.PropertySet)
+ assert is_iterable_typed(created_targets, VirtualTarget)
self.main_target_ = main_target
self.properties_ = prop_set
self.sources_ = sources
@@ -1028,6 +1087,7 @@ class Subvariant:
return self.sources_usage_requirements_
def set_usage_requirements (self, usage_requirements):
+ assert isinstance(usage_requirements, property_set.PropertySet)
self.usage_requirements_ = usage_requirements
def usage_requirements (self):
@@ -1038,7 +1098,9 @@ class Subvariant:
either directly or indirectly, and either as sources,
or as dependency properties. Targets referred with
dependency property are returned a properties, not targets."""
-
+ if __debug__:
+ from .targets import GenerateResult
+ assert isinstance(result, GenerateResult)
# Find directly referenced targets.
deps = self.build_properties().dependency()
all_targets = self.sources_ + deps
@@ -1071,7 +1133,8 @@ class Subvariant:
if 'target_type' is not specified), the result will contain
<$(feature)>path-to-that-target.
"""
-
+ assert isinstance(feature, basestring)
+ assert isinstance(target_type, basestring)
if not target_type:
key = feature
else:
@@ -1088,6 +1151,7 @@ class Subvariant:
return result
def all_target_directories(self, target_type = None):
+ assert isinstance(target_type, (basestring, type(None)))
# TODO: does not appear to use target_type in deciding
# if we've computed this already.
if not self.target_directories_:
@@ -1095,6 +1159,7 @@ class Subvariant:
return self.target_directories_
def compute_target_directories(self, target_type=None):
+ assert isinstance(target_type, (basestring, type(None)))
result = []
for t in self.created_targets():
if not target_type or b2.build.type.is_derived(t.type(), target_type):
diff --git a/tools/build/src/build_system.py b/tools/build/src/build_system.py
index 6bd05d1d9a..b5a3b27752 100644
--- a/tools/build/src/build_system.py
+++ b/tools/build/src/build_system.py
@@ -509,15 +509,6 @@ def main_real():
# that all project files already be loaded.
(target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if properties:
- expanded = build_request.expand_no_defaults(properties)
- else:
- expanded = [property_set.empty()]
-
# Check that we actually found something to build.
if not current_project and not target_ids:
get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
@@ -595,6 +586,22 @@ def main_real():
global results_of_main_targets
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ # The expansion is being performed as late as possible so that the feature
+ # validation is performed after all necessary modules (including project targets
+ # on the command line) have been loaded.
+ if properties:
+ expanded = []
+ for p in properties:
+ expanded.extend(build_request.convert_command_line_element(p))
+
+ expanded = build_request.expand_no_defaults(expanded)
+ else:
+ expanded = [property_set.empty()]
+
# Now that we have a set of targets to build and a set of property sets to
# build the targets with, we can start the main build process by using each
# property set to generate virtual targets from all of our listed targets
diff --git a/tools/build/src/contrib/boost.jam b/tools/build/src/contrib/boost.jam
index 00ea634c04..c3caa3a3bf 100644
--- a/tools/build/src/contrib/boost.jam
+++ b/tools/build/src/contrib/boost.jam
@@ -207,6 +207,7 @@ rule boost_std ( inc ? lib ? )
alias headers ;
boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ;
+ boost_lib_std container : BOOST_CONTAINER_DYN_LINK ;
boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ;
boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ;
boost_lib_std graph : BOOST_GRAPH_DYN_LINK ;
diff --git a/tools/build/src/contrib/boost.py b/tools/build/src/contrib/boost.py
index e256fe9658..7d1f6b4d95 100644
--- a/tools/build/src/contrib/boost.py
+++ b/tools/build/src/contrib/boost.py
@@ -4,7 +4,7 @@
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Boost library support module.
-#
+#
# This module allows to use the boost library from boost-build projects.
# The location of a boost source tree or the path to a pre-built
# version of the library can be configured from either site-config.jam
@@ -13,15 +13,15 @@
# tree. As a last resort it tries to use pre-built libraries from the standard
# search path of the compiler.
#
-# If the location to a source tree is known, the module can be configured
+# If the location to a source tree is known, the module can be configured
# from the *-config.jam files:
#
# using boost : 1.35 : <root>/path-to-boost-root ;
#
# If the location to a pre-built version is known:
#
-# using boost : 1.34
-# : <include>/usr/local/include/boost_1_34
+# using boost : 1.34
+# : <include>/usr/local/include/boost_1_34
# <library>/usr/local/lib
# ;
#
@@ -41,7 +41,7 @@
#
# boost.use-project ;
#
-# The library can be referenced with the project identifier '/boost'. To
+# The library can be referenced with the project identifier '/boost'. To
# reference the program_options you would specify:
#
# exe myexe : mysrc.cpp : <library>/boost//program_options ;
@@ -76,7 +76,7 @@ __debug = None
def debug():
global __debug
if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
@@ -94,9 +94,9 @@ def debug():
# <include>/path-to-include: The include directory to search.
#
# <library>/path-to-library: The library directory to search.
-#
+#
# <layout>system or <layout>versioned.
-#
+#
# <build-id>my_build_id: The custom build id to use.
#
def init(version, options = None):
@@ -130,7 +130,7 @@ rules = projects.project_rules()
# of the boost library. If the 'version' parameter is omitted either
# the configured default (first in config files) is used or an auto
# configuration will be attempted.
-#
+#
@bjam_signature(([ "version", "?" ], ))
def use_project(version = None):
projects.push_current( projects.current() )
@@ -149,7 +149,7 @@ def use_project(version = None):
root = opts.get('<root>' )
inc = opts.get('<include>')
lib = opts.get('<library>')
-
+
if debug():
print "notice: using boost library {} {}".format( version, opt.raw() )
@@ -171,7 +171,7 @@ def use_project(version = None):
root = bjam.variable("BOOST_ROOT")
module = projects.current().project_module()
-
+
if root:
bjam.call('call-in-module', module, 'use-project', ['boost', root])
else:
@@ -199,14 +199,15 @@ def boost_std(inc = None, lib = None):
tag_prop_set = property_set.create([property.Property('<tag>', tag_std)])
attributes = projects.attributes(projects.current().project_module())
attributes.requirements = attributes.requirements.refine(tag_prop_set)
-
+
alias('headers')
-
+
def boost_lib(lib_name, dyn_link_macro):
if (isinstance(lib_name,str)):
lib_name = [lib_name]
builtin.lib(lib_name, usage_requirements=['<link>shared:<define>{}'.format(dyn_link_macro)])
-
+
+ boost_lib('container' , 'BOOST_CONTAINER_DYN_LINK' )
boost_lib('date_time' , 'BOOST_DATE_TIME_DYN_LINK' )
boost_lib('filesystem' , 'BOOST_FILE_SYSTEM_DYN_LINK' )
boost_lib('graph' , 'BOOST_GRAPH_DYN_LINK' )
@@ -267,7 +268,7 @@ def tag_std(name, type, prop_set):
def tag_maybe(param):
return ['-{}'.format(param)] if param else []
-
+
def tag_system(name, type, prop_set):
return common.format_name(['<base>'] + tag_maybe(__build_id), name, type, prop_set)
diff --git a/tools/build/src/engine/build.bat b/tools/build/src/engine/build.bat
index 0fdb804b6a..e0e742da24 100644
--- a/tools/build/src/engine/build.bat
+++ b/tools/build/src/engine/build.bat
@@ -365,7 +365,7 @@ if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
if "_%VCINSTALLDIR%_" == "__" (
set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DWINVER=0x0501 -D_WIN32_WINNT=0x0501 -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
diff --git a/tools/build/src/engine/build.jam b/tools/build/src/engine/build.jam
index 8e48ecae45..c7440553b1 100644
--- a/tools/build/src/engine/build.jam
+++ b/tools/build/src/engine/build.jam
@@ -105,6 +105,12 @@ if $(with-python)
}
}
+if $(--python-include) || $(--python-lib)
+{
+ ECHO "Python includes: $(--python-include:J=)" ;
+ ECHO "Python includes: $(--python-lib:J=)" ;
+}
+
# Boehm GC?
if --gc in $(ARGV)
{
diff --git a/tools/build/src/engine/builtins.c b/tools/build/src/engine/builtins.c
index 8ccd083fd2..0a776c44fd 100644
--- a/tools/build/src/engine/builtins.c
+++ b/tools/build/src/engine/builtins.c
@@ -38,7 +38,19 @@
*/
#include <winioctl.h>
#endif
+
+/* With VC8 (VS2005) these are not defined:
+ * FSCTL_GET_REPARSE_POINT (expects WINVER >= 0x0500 _WIN32_WINNT >= 0x0500 )
+ * IO_REPARSE_TAG_SYMLINK (is part of a separate Driver SDK)
+ * So define them explicitily to their expected values.
+ */
+#ifndef FSCTL_GET_REPARSE_POINT
+# define FSCTL_GET_REPARSE_POINT 0x000900a8
+#endif
+#ifndef IO_REPARSE_TAG_SYMLINK
+# define IO_REPARSE_TAG_SYMLINK (0xA000000CL)
#endif
+#endif /* OS_NT */
#if defined(USE_EXECUNIX)
# include <sys/types.h>
@@ -2058,6 +2070,62 @@ void lol_build( LOL * lol, char const * * elements )
#ifdef HAVE_PYTHON
+static LIST *jam_list_from_string(PyObject *a)
+{
+ return list_new( object_new( PyString_AsString( a ) ) );
+}
+
+static LIST *jam_list_from_sequence(PyObject *a)
+{
+ LIST * l = 0;
+
+ int i = 0;
+ int s = PySequence_Size( a );
+
+ for ( ; i < s; ++i )
+ {
+ /* PySequence_GetItem returns new reference. */
+ PyObject * e = PySequence_GetItem( a, i );
+ char * s = PyString_AsString( e );
+ if ( !s )
+ {
+ err_printf( "Invalid parameter type passed from Python\n" );
+ exit( 1 );
+ }
+ l = list_push_back( l, object_new( s ) );
+ Py_DECREF( e );
+ }
+
+ return l;
+}
+
+static void make_jam_arguments_from_python(FRAME* inner, PyObject *args)
+{
+ int i;
+ int size;
+
+ /* Build up the list of arg lists. */
+ frame_init( inner );
+ inner->prev = 0;
+ inner->prev_user = 0;
+ inner->module = bindmodule( constant_python_interface );
+
+ size = PyTuple_Size( args );
+ for (i = 0 ; i < size; ++i)
+ {
+ PyObject * a = PyTuple_GetItem( args, i );
+ if ( PyString_Check( a ) )
+ {
+ lol_add( inner->args, jam_list_from_string(a) );
+ }
+ else if ( PySequence_Check( a ) )
+ {
+ lol_add( inner->args, jam_list_from_sequence(a) );
+ }
+ }
+}
+
+
/*
* Calls the bjam rule specified by name passed in 'args'. The name is looked up
* in the context of bjam's 'python_interface' module. Returns the list of
@@ -2070,50 +2138,14 @@ PyObject * bjam_call( PyObject * self, PyObject * args )
LIST * result;
PARSE * p;
OBJECT * rulename;
-
- /* Build up the list of arg lists. */
- frame_init( inner );
- inner->prev = 0;
- inner->prev_user = 0;
- inner->module = bindmodule( constant_python_interface );
-
- /* Extract the rule name and arguments from 'args'. */
+ PyObject *args_proper;
/* PyTuple_GetItem returns borrowed reference. */
rulename = object_new( PyString_AsString( PyTuple_GetItem( args, 0 ) ) );
- {
- int i = 1;
- int size = PyTuple_Size( args );
- for ( ; i < size; ++i )
- {
- PyObject * a = PyTuple_GetItem( args, i );
- if ( PyString_Check( a ) )
- {
- lol_add( inner->args, list_new( object_new(
- PyString_AsString( a ) ) ) );
- }
- else if ( PySequence_Check( a ) )
- {
- LIST * l = 0;
- int s = PySequence_Size( a );
- int i = 0;
- for ( ; i < s; ++i )
- {
- /* PySequence_GetItem returns new reference. */
- PyObject * e = PySequence_GetItem( a, i );
- char * s = PyString_AsString( e );
- if ( !s )
- {
- err_printf( "Invalid parameter type passed from Python\n" );
- exit( 1 );
- }
- l = list_push_back( l, object_new( s ) );
- Py_DECREF( e );
- }
- lol_add( inner->args, l );
- }
- }
- }
+
+ args_proper = PyTuple_GetSlice(args, 1, PyTuple_Size(args));
+ make_jam_arguments_from_python (inner, args_proper);
+ Py_DECREF(args_proper);
result = evaluate_rule( bindrule( rulename, inner->module), rulename, inner );
object_free( rulename );
diff --git a/tools/build/src/engine/jam.c b/tools/build/src/engine/jam.c
index debfee9cc6..a949e68dbb 100644
--- a/tools/build/src/engine/jam.c
+++ b/tools/build/src/engine/jam.c
@@ -206,6 +206,7 @@ int anyhow = 0;
extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
+ int python_optimize = 1; /* Set Python optimization on by default */
#endif
void regex_done();
@@ -235,7 +236,13 @@ int main( int argc, char * * argv, char * * arg_environ )
--argc;
++argv;
- if ( getoptions( argc, argv, "-:l:m:d:j:p:f:gs:t:ano:qv", optv ) < 0 )
+ #ifdef HAVE_PYTHON
+ #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qvz"
+ #else
+ #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qv"
+ #endif
+
+ if ( getoptions( argc, argv, OPTSTRING, optv ) < 0 )
{
err_printf( "\nusage: %s [ options ] targets...\n\n", progname );
@@ -253,6 +260,9 @@ int main( int argc, char * * argv, char * * arg_environ )
err_printf( "-sx=y Set variable x=y, overriding environment.\n" );
err_printf( "-tx Rebuild x, even if it is up-to-date.\n" );
err_printf( "-v Print the version of jam and exit.\n" );
+ #ifdef HAVE_PYTHON
+ err_printf( "-z Disable Python Optimization and enable asserts\n" );
+ #endif
err_printf( "--x Option is ignored.\n\n" );
exit( EXITBAD );
@@ -318,6 +328,11 @@ int main( int argc, char * * argv, char * * arg_environ )
if ( ( s = getoptval( optv, 'm', 0 ) ) )
globs.max_buf = atoi( s ) * 1024; /* convert to kb */
+ #ifdef HAVE_PYTHON
+ if ( ( s = getoptval( optv, 'z', 0 ) ) )
+ python_optimize = 0; /* disable python optimization */
+ #endif
+
/* Turn on/off debugging */
for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n )
{
@@ -364,6 +379,7 @@ int main( int argc, char * * argv, char * * arg_environ )
#ifdef HAVE_PYTHON
{
PROFILE_ENTER( MAIN_PYTHON );
+ Py_OptimizeFlag = python_optimize;
Py_Initialize();
{
static PyMethodDef BjamMethods[] = {
diff --git a/tools/build/src/kernel/bootstrap.py b/tools/build/src/kernel/bootstrap.py
index 2e8dd37b7b..3746b5e667 100644
--- a/tools/build/src/kernel/bootstrap.py
+++ b/tools/build/src/kernel/bootstrap.py
@@ -1,6 +1,6 @@
-# Copyright 2009 Vladimir Prus
+# Copyright 2009 Vladimir Prus
#
-# Distributed under the Boost Software License, Version 1.0.
+# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import imp
diff --git a/tools/build/src/manager.py b/tools/build/src/manager.py
index 473857fc76..9c1e057339 100644
--- a/tools/build/src/manager.py
+++ b/tools/build/src/manager.py
@@ -14,7 +14,7 @@ class Manager:
""" This class is a facade to the Boost.Build system.
It serves as the root to access all data structures in use.
"""
-
+
def __init__ (self, engine, global_build_dir):
""" Constructor.
engine: the build engine that will actually construct the targets.
@@ -26,7 +26,7 @@ class Manager:
from build.errors import Errors
from b2.util.logger import NullLogger
from build import build_request, property_set, feature
-
+
self.engine_ = engine
self.virtual_targets_ = VirtualTargetRegistry (self)
self.projects_ = ProjectRegistry (self, global_build_dir)
@@ -37,16 +37,16 @@ class Manager:
self.boost_build_path_ = bjam.variable("BOOST_BUILD_PATH")
self.errors_ = Errors()
self.command_line_free_features_ = property_set.empty()
-
+
global the_manager
the_manager = self
-
+
def scanners (self):
return self.scanners_
def engine (self):
return self.engine_
-
+
def virtual_targets (self):
return self.virtual_targets_
@@ -58,7 +58,7 @@ class Manager:
def argv (self):
return self.argv_
-
+
def logger (self):
return self.logger_
@@ -88,7 +88,7 @@ class Manager:
if not targets:
for name, project in self.projects ().projects ():
targets.append (project.target ())
-
+
property_groups = build_request.expand_no_defaults (properties)
virtual_targets = []
@@ -107,4 +107,4 @@ class Manager:
actual_targets = []
for virtual_target in virtual_targets:
actual_targets.extend (virtual_target.actualize ())
-
+
diff --git a/tools/build/src/tools/builtin.py b/tools/build/src/tools/builtin.py
index 14a883e1b6..a149a33320 100644
--- a/tools/build/src/tools/builtin.py
+++ b/tools/build/src/tools/builtin.py
@@ -13,7 +13,7 @@ import b2.build.targets as targets
import sys
from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
from b2.util.utility import *
-from b2.util import path, regex, bjam_signature
+from b2.util import path, regex, bjam_signature, is_iterable_typed
import b2.tools.types
from b2.manager import get_manager
@@ -36,15 +36,15 @@ def variant (name, parents_or_properties, explicit_properties = []):
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
-
+
Second, determines the full property set for this variant by
- adding to the explicit properties default values for all properties
+ adding to the explicit properties default values for all properties
which neither present nor are symmetric.
-
+
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
- parents_or_properties: Specifies parent variants, if
+ parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
@@ -54,7 +54,7 @@ def variant (name, parents_or_properties, explicit_properties = []):
explicit_properties = parents_or_properties
else:
parents = parents_or_properties
-
+
inherited = property_set.empty()
if parents:
@@ -62,22 +62,22 @@ def variant (name, parents_or_properties, explicit_properties = []):
# between base variants, and there was no demand for so to bother.
if len (parents) > 1:
raise BaseException ("Multiple base variants are not yet supported")
-
+
p = parents[0]
# TODO: the check may be stricter
if not feature.is_implicit_value (p):
raise BaseException ("Invalid base variant '%s'" % p)
-
+
inherited = __variant_explicit_properties[p]
explicit_properties = property_set.create_with_validation(explicit_properties)
explicit_properties = inherited.refine(explicit_properties)
-
+
# Record explicitly specified properties for this variant
# We do this after inheriting parents' properties, so that
# they affect other variants, derived from this one.
__variant_explicit_properties[name] = explicit_properties
-
+
feature.extend('variant', [name])
feature.compose ("<variant>" + name, explicit_properties.all())
@@ -128,20 +128,20 @@ def register_globals ():
feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
feature.set_default('target-os', default_host_os())
-
+
feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
-
+
feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
-
+
feature.feature ('link', ['shared', 'static'], ['propagated'])
feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
-
-
+
+
feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
feature.feature ('profiling', ['off', 'on'], ['propagated'])
feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])
-
+
feature.feature ('threading', ['single', 'multi'], ['propagated'])
feature.feature ('rtti', ['on', 'off'], ['propagated'])
feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
@@ -162,21 +162,21 @@ def register_globals ():
feature.feature ('linkflags', [], ['free'])
feature.feature ('archiveflags', [], ['free'])
feature.feature ('version', [], ['free'])
-
+
feature.feature ('location-prefix', [], ['free'])
feature.feature ('action', [], ['free'])
-
+
# The following features are incidental, since
# in themself they have no effect on build products.
# Not making them incidental will result in problems in corner
# cases, for example:
- #
+ #
# unit-test a : a.cpp : <use>b ;
# lib b : a.cpp b ;
- #
- # Here, if <use> is not incidental, we'll decide we have two
+ #
+ # Here, if <use> is not incidental, we'll decide we have two
# targets for a.obj with different properties, and will complain.
#
# Note that making feature incidental does not mean it's ignored. It may
@@ -196,7 +196,7 @@ def register_globals ():
'off', # Do not fail the compilation if there are warnings.
'on'], # Fail the compilation if there are warnings.
['incidental', 'propagated'])
-
+
feature.feature('c++-template-depth',
[str(i) for i in range(64,1024+1,64)] +
[str(i) for i in range(20,1000+1,10)] +
@@ -213,31 +213,31 @@ def register_globals ():
feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
# Internal feature.
feature.feature ('library-file', [], ['free', 'dependency'])
-
+
feature.feature ('name', [], ['free'])
feature.feature ('tag', [], ['free'])
feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
feature.feature ('location', [], ['free', 'path'])
-
+
feature.feature ('dll-path', [], ['free', 'path'])
feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])
-
-
+
+
# This is internal feature which holds the paths of all dependency
# dynamic libraries. On Windows, it's needed so that we can all
# those paths to PATH, when running applications.
# On Linux, it's needed to add proper -rpath-link command line options.
feature.feature ('xdll-path', [], ['free', 'path'])
-
+
#provides means to specify def-file for windows dlls.
feature.feature ('def-file', [], ['free', 'dependency'])
-
+
# This feature is used to allow specific generators to run.
# For example, QT tools can only be invoked when QT library
# is used. In that case, <allow>qt will be in usage requirement
# of the library.
feature.feature ('allow', [], ['free'])
-
+
# The addressing model to generate code for. Currently a limited set only
# specifying the bit size of pointers.
feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
@@ -261,7 +261,7 @@ def register_globals ():
# HP/PA-RISC
'parisc',
-
+
# Advanced RISC Machines
'arm',
@@ -307,7 +307,7 @@ def register_globals ():
# HP/PA-RISC
'700', '7100', '7100lc', '7200', '7300', '8000',
-
+
# Advanced RISC Machines
'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],
@@ -318,17 +318,17 @@ def register_globals ():
# The value of 'no' prevents building of a target.
feature.feature('build', ['yes', 'no'], ['optional'])
-
+
# Windows-specific features
feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
- variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
+ variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
'<runtime-debugging>off', '<define>NDEBUG'])
variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
-
+
reset ()
register_globals ()
@@ -336,19 +336,19 @@ register_globals ()
class SearchedLibTarget (virtual_target.AbstractFileTarget):
def __init__ (self, name, project, shared, search, action):
virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
-
+
self.shared_ = shared
self.search_ = search
def shared (self):
return self.shared_
-
+
def search (self):
return self.search_
-
+
def actualize_location (self, target):
bjam.call("NOTFILE", target)
-
+
def path (self):
#FIXME: several functions rely on this not being None
return ""
@@ -361,24 +361,24 @@ class CScanner (scanner.Scanner):
self.includes_ = []
for i in includes:
- self.includes_.extend(i.split("&&"))
+ self.includes_.extend(i.split("&&"))
def pattern (self):
return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
def process (self, target, matches, binding):
-
+
angle = regex.transform (matches, "<(.*)>")
quoted = regex.transform (matches, '"(.*)"')
g = str(id(self))
b = os.path.normpath(os.path.dirname(binding[0]))
-
+
# Attach binding of including file to included targets.
# When target is directly created from virtual target
# this extra information is unnecessary. But in other
- # cases, it allows to distinguish between two headers of the
- # same name included from different places.
+ # cases, it allows to distinguish between two headers of the
+ # same name included from different places.
# We don't need this extra information for angle includes,
# since they should not depend on including file (we can't
# get literal "." in include path).
@@ -395,11 +395,11 @@ class CScanner (scanner.Scanner):
engine = get_manager().engine()
engine.set_target_variable(angle, "SEARCH", get_value(self.includes_))
engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_))
-
+
# Just propagate current scanner to includes, in a hope
- # that includes do not change scanners.
+ # that includes do not change scanners.
get_manager().scanners().propagate(self, angle + quoted)
-
+
scanner.register (CScanner, 'include')
type.set_scanner ('CPP', CScanner)
type.set_scanner ('C', CScanner)
@@ -407,15 +407,18 @@ type.set_scanner ('C', CScanner)
# Ported to trunk@47077
class LibGenerator (generators.Generator):
""" The generator class for libraries (target type LIB). Depending on properties it will
- request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or
+ request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or
SHARED_LIB.
"""
def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
-
- def run(self, project, name, prop_set, sources):
+ def run(self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# The lib generator is composing, and can be only invoked with
# explicit name. This check is present in generator.run (and so in
# builtin.LinkingGenerator), but duplicate it here to avoid doing
@@ -429,7 +432,7 @@ class LibGenerator (generators.Generator):
('<search>' in properties_grist or '<name>' in properties_grist):
actual_type = 'SEARCHED_LIB'
elif '<file>' in properties_grist:
- # The generator for
+ # The generator for
actual_type = 'LIB'
elif '<link>shared' in properties:
actual_type = 'SHARED_LIB'
@@ -451,7 +454,11 @@ generators.override("builtin.prebuilt", "builtin.lib-generator")
def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
"""The implementation of the 'lib' rule. Beyond standard syntax that rule allows
simplified: 'lib a b c ;'."""
-
+ assert is_iterable_typed(names, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
if len(names) > 1:
if any(r.startswith('<name>') for r in requirements):
get_manager().errors()("When several names are given to the 'lib' rule\n" +
@@ -490,8 +497,12 @@ class SearchedLibGenerator (generators.Generator):
# is make sure SearchedLibGenerator is not invoked deep in transformation
# search.
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
+
def run(self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
if not name:
return None
@@ -499,12 +510,12 @@ class SearchedLibGenerator (generators.Generator):
# If name is empty, it means we're called not from top-level.
# In this case, we just fail immediately, because SearchedLibGenerator
# cannot be used to produce intermediate targets.
-
+
properties = prop_set.raw ()
shared = '<link>shared' in properties
a = virtual_target.NullAction (project.manager(), prop_set)
-
+
real_name = feature.get_values ('<name>', properties)
if real_name:
real_name = real_name[0]
@@ -515,7 +526,7 @@ class SearchedLibGenerator (generators.Generator):
t = SearchedLibTarget(real_name, project, shared, search, a)
# We return sources for a simple reason. If there's
- # lib png : z : <name>png ;
+ # lib png : z : <name>png ;
# the 'z' target should be returned, so that apps linking to
# 'png' will link to 'z', too.
return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
@@ -525,9 +536,14 @@ generators.register (SearchedLibGenerator ())
class PrebuiltLibGenerator(generators.Generator):
def __init__(self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, properties, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
f = properties.get("file")
return f + sources
@@ -542,10 +558,11 @@ class CompileAction (virtual_target.Action):
virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
def adjust_properties (self, prop_set):
- """ For all virtual targets for the same dependency graph as self,
+ """ For all virtual targets for the same dependency graph as self,
i.e. which belong to the same main target, add their directories
to include path.
"""
+ assert isinstance(prop_set, property_set.PropertySet)
s = self.targets () [0].creating_subvariant ()
return prop_set.add_raw (s.implicit_includes ('include', 'H'))
@@ -560,7 +577,7 @@ class CCompilingGenerator (generators.Generator):
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
# TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
+
def action_class (self):
return CompileAction
@@ -574,11 +591,15 @@ class LinkingGenerator (generators.Generator):
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
+
def run (self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
sources.extend(prop_set.get('<library>'))
-
+
# Add <library-path> properties for all searched libraries
extra = []
for s in sources:
@@ -589,8 +610,8 @@ class LinkingGenerator (generators.Generator):
# It's possible that we have libraries in sources which did not came
# from 'lib' target. For example, libraries which are specified
# just as filenames as sources. We don't have xdll-path properties
- # for such target, but still need to add proper dll-path properties.
- extra_xdll_path = []
+ # for such target, but still need to add proper dll-path properties.
+ extra_xdll_path = []
for s in sources:
if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
# Unfortunately, we don't have a good way to find the path
@@ -602,7 +623,7 @@ class LinkingGenerator (generators.Generator):
# Hardcode DLL paths only when linking executables.
# Pros: do not need to relink libraries when installing.
# Cons: "standalone" libraries (plugins, python extensions) can not
- # hardcode paths to dependent libraries.
+ # hardcode paths to dependent libraries.
if prop_set.get('<hardcode-dll-paths>') == ['true'] \
and type.is_derived(self.target_types_ [0], 'EXE'):
xdll_path = prop_set.get('<xdll-path>')
@@ -610,64 +631,69 @@ class LinkingGenerator (generators.Generator):
for sp in extra_xdll_path)
extra.extend(property.Property('<dll-path>', sp) \
for sp in xdll_path)
-
+
if extra:
- prop_set = prop_set.add_raw (extra)
+ prop_set = prop_set.add_raw (extra)
result = generators.Generator.run(self, project, name, prop_set, sources)
-
+
if result:
ur = self.extra_usage_requirements(result, prop_set)
ur = ur.add(property_set.create(['<xdll-path>' + p for p in extra_xdll_path]))
else:
return None
return (ur, result)
-
+
def extra_usage_requirements (self, created_targets, prop_set):
-
+ assert is_iterable_typed(created_targets, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+
result = property_set.empty ()
extra = []
-
+
# Add appropriate <xdll-path> usage requirements.
raw = prop_set.raw ()
if '<link>shared' in raw:
paths = []
-
- # TODO: is it safe to use the current directory? I think we should use
+
+ # TODO: is it safe to use the current directory? I think we should use
# another mechanism to allow this to be run from anywhere.
pwd = os.getcwd()
-
+
for t in created_targets:
if type.is_derived(t.type(), 'SHARED_LIB'):
paths.append(path.root(path.make(t.path()), pwd))
extra += replace_grist(paths, '<xdll-path>')
-
+
# We need to pass <xdll-path> features that we've got from sources,
# because if shared library is built, exe which uses it must know paths
# to other shared libraries this one depends on, to be able to find them
# all at runtime.
-
+
# Just pass all features in property_set, it's theorically possible
# that we'll propagate <xdll-path> features explicitly specified by
- # the user, but then the user's to blaim for using internal feature.
+ # the user, but then the user's to blaim for using internal feature.
values = prop_set.get('<xdll-path>')
extra += replace_grist(values, '<xdll-path>')
-
+
if extra:
result = property_set.create(extra)
return result
def generated_targets (self, sources, prop_set, project, name):
-
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring)
# sources to pass to inherited rule
sources2 = []
# sources which are libraries
libraries = []
-
+
# Searched libraries are not passed as argument to linker
# but via some option. So, we pass them to the action
- # via property.
+ # via property.
fsa = []
fst = []
for s in sources:
@@ -688,7 +714,7 @@ class LinkingGenerator (generators.Generator):
if fst:
add.append("<find-static-library>" + '&&'.join(fst))
- spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
+ spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
return spawn
@@ -701,12 +727,12 @@ class ArchiveGenerator (generators.Generator):
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
+
def run (self, project, name, prop_set, sources):
sources += prop_set.get ('<library>')
-
+
result = generators.Generator.run (self, project, name, prop_set, sources)
-
+
return result
diff --git a/tools/build/src/tools/cast.py b/tools/build/src/tools/cast.py
index 8f053f110c..0d21edb0b6 100644
--- a/tools/build/src/tools/cast.py
+++ b/tools/build/src/tools/cast.py
@@ -25,18 +25,22 @@
# > cast, as defining a new target type + generator for that type is somewhat
# > simpler than defining a main target rule.
-import b2.build.targets as targets
-import b2.build.virtual_target as virtual_target
+from b2.build import targets, virtual_target, property_set
from b2.manager import get_manager
-from b2.util import bjam_signature
+from b2.util import bjam_signature, is_iterable_typed
+
class CastTargetClass(targets.TypedTarget):
- def construct(name, source_targets, ps):
+ def construct(self, name, source_targets, ps):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, virtual_target.VirtualTarget)
+ assert isinstance(ps, property_set.PropertySet)
+
result = []
for s in source_targets:
- if not isinstance(s, virtual_targets.FileTarget):
+ if not isinstance(s, virtual_target.FileTarget):
get_manager().errors()("Source to the 'cast' metatager is not a file")
if s.action():
@@ -46,18 +50,17 @@ class CastTargetClass(targets.TypedTarget):
r = s.clone_with_different_type(self.type())
result.append(get_manager().virtual_targets().register(r))
- return result
-
+ return property_set.empty(), result
@bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"],
["default_build", "*"], ["usage_requirements", "*"]))
def cast(name, type, sources, requirements, default_build, usage_requirements):
-
+
from b2.manager import get_manager
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
CastTargetClass(name, project, type,
t.main_target_sources(sources, name),
diff --git a/tools/build/src/tools/common.py b/tools/build/src/tools/common.py
index 443b3e92d2..3f30baa568 100644
--- a/tools/build/src/tools/common.py
+++ b/tools/build/src/tools/common.py
@@ -17,31 +17,31 @@ import os.path
import sys
# for some reason this fails on Python 2.7(r27:82525)
-# from b2.build import virtual_target
+# from b2.build import virtual_target
import b2.build.virtual_target
from b2.build import feature, type
from b2.util.utility import *
-from b2.util import path
+from b2.util import path, is_iterable_typed
__re__before_first_dash = re.compile ('([^-]*)-')
def reset ():
""" Clear the module state. This is mainly for testing purposes.
Note that this must be called _after_ resetting the module 'feature'.
- """
+ """
global __had_unspecified_value, __had_value, __declared_subfeature
global __init_loc
global __all_signatures, __debug_configuration, __show_configuration
-
+
# Stores toolsets without specified initialization values.
__had_unspecified_value = {}
# Stores toolsets with specified initialization values.
__had_value = {}
-
+
# Stores toolsets with declared subfeatures.
__declared_subfeature = {}
-
+
# Stores all signatures of the toolsets.
__all_signatures = {}
@@ -70,7 +70,7 @@ def reset ():
"HAIKU": "LIBRARY_PATH"}
global __shared_library_path_variable
__shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH")
-
+
reset()
def shared_library_path_variable():
@@ -112,6 +112,7 @@ class Configurations(object):
Returns True if the configuration has been added and False if
it already exists. Reports an error if the configuration is 'used'.
"""
+ assert isinstance(id, basestring)
if id in self.used_:
#FIXME
errors.error("common: the configuration '$(id)' is in use")
@@ -132,6 +133,7 @@ class Configurations(object):
'used' and False if it the state wasn't changed. Reports an error
if the configuration isn't known.
"""
+ assert isinstance(id, basestring)
if id not in self.all_:
#FIXME:
errors.error("common: the configuration '$(id)' is not known")
@@ -154,10 +156,15 @@ class Configurations(object):
def get(self, id, param):
""" Returns the value of a configuration parameter. """
+ assert isinstance(id, basestring)
+ assert isinstance(param, basestring)
return self.params_.get(param, {}).get(id)
def set (self, id, param, value):
""" Sets the value of a configuration parameter. """
+ assert isinstance(id, basestring)
+ assert isinstance(param, basestring)
+ assert is_iterable_typed(value, basestring)
self.params_.setdefault(param, {})[id] = value
# Ported from trunk@47174
@@ -174,14 +181,11 @@ def check_init_parameters(toolset, requirement, *args):
The return value from this rule is a condition to be used for flags settings.
"""
+ assert isinstance(toolset, basestring)
+ assert is_iterable_typed(requirement, basestring)
from b2.build import toolset as b2_toolset
if requirement is None:
requirement = []
- # The type checking here is my best guess about
- # what the types should be.
- assert(isinstance(toolset, str))
- # iterable and not a string, allows for future support of sets
- assert(not isinstance(requirement, basestring) and hasattr(requirement, '__contains__'))
sig = toolset
condition = replace_grist(toolset, '<toolset>')
subcondition = []
@@ -193,7 +197,7 @@ def check_init_parameters(toolset, requirement, *args):
value = arg[1]
assert(isinstance(name, str))
assert(isinstance(value, str) or value is None)
-
+
str_toolset_name = str((toolset, name))
# FIXME: is this the correct translation?
@@ -235,7 +239,7 @@ def check_init_parameters(toolset, requirement, *args):
__had_unspecified_value[str_toolset_name] = True
if value == None: value = ''
-
+
sig = sig + value + '-'
# if a requirement is specified, the signature should be unique
@@ -245,12 +249,12 @@ def check_init_parameters(toolset, requirement, *args):
if __all_signatures.has_key(sig):
message = "duplicate initialization of '%s' with the following parameters: " % toolset
-
+
for arg in args:
name = arg[0]
value = arg[1]
if value == None: value = '<unspecified>'
-
+
message += "'%s' = '%s'\n" % (name, value)
raise BaseException(message)
@@ -285,38 +289,35 @@ def get_invocation_command_nodefault(
'user-provided-command' is not given, tries to find binary named 'tool' in
PATH and in the passed 'additional-path'. Otherwise, verifies that the first
element of 'user-provided-command' is an existing program.
-
+
This rule returns the command to be used when invoking the tool. If we can't
find the tool, a warning is issued. If 'path-last' is specified, PATH is
checked after 'additional-paths' when searching for 'tool'.
"""
- assert(isinstance(toolset, str))
- assert(isinstance(tool, str))
- assert(isinstance(user_provided_command, list))
- if additional_paths is not None:
- assert(isinstance(additional_paths, list))
- assert(all([isinstance(path, str) for path in additional_paths]))
- assert(all(isinstance(path, str) for path in additional_paths))
- assert(isinstance(path_last, bool))
-
+ assert isinstance(toolset, basestring)
+ assert isinstance(tool, basestring)
+ assert is_iterable_typed(user_provided_command, basestring)
+ assert is_iterable_typed(additional_paths, basestring) or additional_paths is None
+ assert isinstance(path_last, (int, bool))
+
if not user_provided_command:
- command = find_tool(tool, additional_paths, path_last)
+ command = find_tool(tool, additional_paths, path_last)
if not command and __debug_configuration:
print "warning: toolset", toolset, "initialization: can't find tool, tool"
#FIXME
#print "warning: initialized from" [ errors.nearest-user-location ] ;
else:
command = check_tool(user_provided_command)
- assert(isinstance(command, list))
- command=' '.join(command)
if not command and __debug_configuration:
print "warning: toolset", toolset, "initialization:"
print "warning: can't find user-provided command", user_provided_command
#FIXME
#ECHO "warning: initialized from" [ errors.nearest-user-location ]
+ command = []
+ command = ' '.join(command)
assert(isinstance(command, str))
-
+
return command
# ported from trunk@47174
@@ -325,14 +326,11 @@ def get_invocation_command(toolset, tool, user_provided_command = [],
""" Same as get_invocation_command_nodefault, except that if no tool is found,
returns either the user-provided-command, if present, or the 'tool' parameter.
"""
-
- assert(isinstance(toolset, str))
- assert(isinstance(tool, str))
- assert(isinstance(user_provided_command, list))
- if additional_paths is not None:
- assert(isinstance(additional_paths, list))
- assert(all([isinstance(path, str) for path in additional_paths]))
- assert(isinstance(path_last, bool))
+ assert isinstance(toolset, basestring)
+ assert isinstance(tool, basestring)
+ assert is_iterable_typed(user_provided_command, basestring)
+ assert is_iterable_typed(additional_paths, basestring) or additional_paths is None
+ assert isinstance(path_last, (int, bool))
result = get_invocation_command_nodefault(toolset, tool,
user_provided_command,
@@ -346,7 +344,7 @@ def get_invocation_command(toolset, tool, user_provided_command = [],
result = tool
assert(isinstance(result, str))
-
+
return result
# ported from trunk@47281
@@ -356,6 +354,7 @@ def get_absolute_tool_path(command):
return the absolute path to the command. This works even if commnad
has not path element and is present in PATH.
"""
+ assert isinstance(command, basestring)
if os.path.dirname(command):
return os.path.dirname(command)
else:
@@ -376,9 +375,9 @@ def find_tool(name, additional_paths = [], path_last = False):
Otherwise, returns the empty string. If 'path_last' is specified,
path is checked after 'additional_paths'.
"""
- assert(isinstance(name, str))
- assert(isinstance(additional_paths, list))
- assert(isinstance(path_last, bool))
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(additional_paths, basestring)
+ assert isinstance(path_last, (int, bool))
programs = path.programs_path()
match = path.glob(programs, [name, name + '.exe'])
@@ -407,7 +406,7 @@ def check_tool_aux(command):
""" Checks if 'command' can be found either in path
or is a full name to an existing file.
"""
- assert(isinstance(command, str))
+ assert isinstance(command, basestring)
dirname = os.path.dirname(command)
if dirname:
if os.path.exists(command):
@@ -425,13 +424,12 @@ def check_tool_aux(command):
# ported from trunk@47281
def check_tool(command):
- """ Checks that a tool can be invoked by 'command'.
+ """ Checks that a tool can be invoked by 'command'.
If command is not an absolute path, checks if it can be found in 'path'.
If comand is absolute path, check that it exists. Returns 'command'
if ok and empty string otherwise.
"""
- assert(isinstance(command, list))
- assert(all(isinstance(c, str) for c in command))
+ assert is_iterable_typed(command, basestring)
#FIXME: why do we check the first and last elements????
if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
return command
@@ -449,11 +447,10 @@ def handle_options(tool, condition, command, options):
"""
from b2.build import toolset
- assert(isinstance(tool, str))
- assert(isinstance(condition, list))
- assert(isinstance(command, str))
- assert(isinstance(options, list))
- assert(command)
+ assert isinstance(tool, basestring)
+ assert is_iterable_typed(condition, basestring)
+ assert command and isinstance(command, basestring)
+ assert is_iterable_typed(options, basestring)
toolset.flags(tool, 'CONFIG_COMMAND', condition, [command])
toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options))
toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options))
@@ -490,8 +487,8 @@ def variable_setting_command(variable, value):
words, on Unix systems, the variable is exported, which is consistent with the
only possible behavior on Windows systems.
"""
- assert(isinstance(variable, str))
- assert(isinstance(value, str))
+ assert isinstance(variable, basestring)
+ assert isinstance(value, basestring)
if os_name() == 'NT':
return "set " + variable + "=" + value + os.linesep
@@ -533,8 +530,8 @@ def path_variable_setting_command(variable, paths):
Returns a command to sets a named shell path variable to the given NATIVE
paths on the current platform.
"""
- assert(isinstance(variable, str))
- assert(isinstance(paths, list))
+ assert isinstance(variable, basestring)
+ assert is_iterable_typed(paths, basestring)
sep = os.path.pathsep
return variable_setting_command(variable, sep.join(paths))
@@ -542,7 +539,10 @@ def prepend_path_variable_command(variable, paths):
"""
Returns a command that prepends the given paths to the named path variable on
the current platform.
- """
+ """
+ assert isinstance(variable, basestring)
+ assert is_iterable_typed(paths, basestring)
+
return path_variable_setting_command(variable,
paths + os.environ.get(variable, "").split(os.pathsep))
@@ -562,6 +562,7 @@ __mkdir_set = set()
__re_windows_drive = re.compile(r'^.*:\$')
def mkdir(engine, target):
+ assert isinstance(target, basestring)
# If dir exists, do not update it. Do this even for $(DOT).
bjam.call('NOUPDATE', target)
@@ -590,7 +591,7 @@ def mkdir(engine, target):
if os_name() == 'NT':
if(__re_windows_drive.match(s)):
s = ''
-
+
if s:
if s != target:
engine.add_dependency(target, s)
@@ -642,9 +643,12 @@ def format_name(format, name, target_type, prop_set):
The returned name also has the target type specific prefix and suffix which
puts it in a ready form to use as the value from a custom tag rule.
"""
- assert(isinstance(format, list))
- assert(isinstance(name, str))
- assert(isinstance(target_type, str) or not type)
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert is_iterable_typed(format, basestring)
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
# assert(isinstance(prop_set, property_set.PropertySet))
if type.is_derived(target_type, 'LIB'):
result = "" ;
@@ -653,7 +657,7 @@ def format_name(format, name, target_type, prop_set):
if grist == '<base>':
result += os.path.basename(name)
elif grist == '<toolset>':
- result += join_tag(get_value(f),
+ result += join_tag(get_value(f),
toolset_tag(name, target_type, prop_set))
elif grist == '<threading>':
result += join_tag(get_value(f),
@@ -690,6 +694,8 @@ def format_name(format, name, target_type, prop_set):
return result
def join_tag(joiner, tag):
+ assert isinstance(joiner, basestring)
+ assert isinstance(tag, basestring)
if tag:
if not joiner: joiner = '-'
return joiner + tag
@@ -698,6 +704,11 @@ def join_tag(joiner, tag):
__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)")
def toolset_tag(name, target_type, prop_set):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
tag = ''
properties = prop_set.raw()
@@ -708,7 +719,7 @@ def toolset_tag(name, target_type, prop_set):
elif tools.startswith('como'): tag += 'como'
elif tools.startswith('cw'): tag += 'cw'
elif tools.startswith('darwin'): tag += 'xgcc'
- elif tools.startswith('edg'): tag += edg
+ elif tools.startswith('edg'): tag += 'edg'
elif tools.startswith('gcc'):
flavor = prop_set.get('<toolset-gcc:flavor>')
''.find
@@ -764,6 +775,11 @@ def toolset_tag(name, target_type, prop_set):
def threading_tag(name, target_type, prop_set):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
tag = ''
properties = prop_set.raw()
if '<threading>multi' in properties: tag = 'mt'
@@ -772,6 +788,11 @@ def threading_tag(name, target_type, prop_set):
def runtime_tag(name, target_type, prop_set ):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
tag = ''
properties = prop_set.raw()
@@ -847,13 +868,13 @@ def init(manager):
__CP = 'cp'
__IGNORE = ''
__LN = 'ln'
-
+
engine.register_action("common.Clean", __RM + ' "$(>)"',
flags=['piecemeal', 'together', 'existing'])
engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"')
engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE,
flags=['quietly', 'updated', 'piecemeal', 'together'])
- engine.register_action("common.hard-link",
+ engine.register_action("common.hard-link",
__RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep +
__LN + ' "$(>)" "$(<)" $(NULL_OUT)')
diff --git a/tools/build/src/tools/darwin.py b/tools/build/src/tools/darwin.py
index c29196060b..f03d63f350 100644
--- a/tools/build/src/tools/darwin.py
+++ b/tools/build/src/tools/darwin.py
@@ -21,11 +21,11 @@ def init (version = None, command = None, options = None):
options = to_seq (options)
condition = common.check_init_parameters ('darwin', None, ('version', version))
-
+
command = common.get_invocation_command ('darwin', 'g++', command)
common.handle_options ('darwin', condition, command, options)
-
+
gcc.init_link_flags ('darwin', 'darwin', condition)
# Darwin has a different shared library suffix
diff --git a/tools/build/src/tools/doxproc.py b/tools/build/src/tools/doxproc.py
index 4cbd5edd2f..c41d7fde15 100644
--- a/tools/build/src/tools/doxproc.py
+++ b/tools/build/src/tools/doxproc.py
@@ -17,7 +17,7 @@ import glob
import re
import xml.dom.minidom
-
+
def usage():
print '''
Usage:
@@ -49,11 +49,11 @@ def get_args( argv = sys.argv[1:] ):
}
( option_pairs, other ) = getopt.getopt( argv, '', spec )
map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs )
-
+
if options.has_key( '--help' ):
usage()
sys.exit(1)
-
+
return {
'xmldir' : options['--xmldir'],
'output' : options['--output'],
@@ -69,7 +69,7 @@ def if_attribute(node, attribute, true_value, false_value=None):
return false_value
class Doxygen2BoostBook:
-
+
def __init__( self, **kwargs ):
##
self.args = kwargs
@@ -113,17 +113,17 @@ class Doxygen2BoostBook:
self.idmap = {}
#~ Marks generation, to prevent redoing it.
self.generated = False
-
+
#~ Add an Doxygen generated XML document to the content we are translating.
def addDox( self, document ):
self._translateNode(document.documentElement)
-
+
#~ Turns the internal XML tree into an output UTF-8 string.
def tostring( self ):
self._generate()
#~ return self.boostbook.toprettyxml(' ')
return self.boostbook.toxml('utf-8')
-
+
#~ Does post-processing on the partial generated content to generate additional info
#~ now that we have the complete source documents.
def _generate( self ):
@@ -146,7 +146,7 @@ class Doxygen2BoostBook:
del self.idmap[self.symbols[symbol]['id']]
container.appendChild(self.symbols[symbol]['dom'])
self._rewriteIDs(self.boostbook.documentElement)
-
+
#~ Rewrite the various IDs from Doxygen references to the newly created
#~ BoostBook references.
def _rewriteIDs( self, node ):
@@ -166,7 +166,7 @@ class Doxygen2BoostBook:
self._rewriteIDs(node.firstChild)
if node.nextSibling:
self._rewriteIDs(node.nextSibling)
-
+
def _resolveContainer( self, cpp, root ):
container = root
for ns in cpp['namespace']:
@@ -181,11 +181,11 @@ class Doxygen2BoostBook:
break
container = node
return container
-
+
def _setID( self, id, name ):
self.idmap[id] = name.replace('::','.').replace('/','.')
#~ print '--| setID:',id,'::',self.idmap[id]
-
+
#~ Translate a given node within a given context.
#~ The translation dispatches to a local method of the form
#~ "_translate[_context0,...,_contextN]", and the keyword args are
@@ -208,7 +208,7 @@ class Doxygen2BoostBook:
if hasattr(self,name):
return getattr(self,name)(node,**kwargs)
return None
-
+
#~ Translates the children of the given parent node, appending the results
#~ to the indicated target. For nodes not translated by the translation method
#~ it copies the child over and recurses on that child to translate any
@@ -225,7 +225,7 @@ class Doxygen2BoostBook:
child.data = re.sub(r'\s+',' ',child.data)
target.appendChild(child)
self._translateChildren(n,target=child)
-
+
#~ Translate the given node as a description, into the description subnode
#~ of the target. If no description subnode is present in the target it
#~ is created.
@@ -235,7 +235,7 @@ class Doxygen2BoostBook:
description = target.appendChild(self._createNode(tag))
self._translateChildren(node,target=description)
return description
-
+
#~ Top level translation of: <doxygen ...>...</doxygen>,
#~ translates the children.
def _translate_doxygen( self, node ):
@@ -246,7 +246,7 @@ class Doxygen2BoostBook:
if newNode:
result.append(newNode)
return result
-
+
#~ Top level translation of:
#~ <doxygenindex ...>
#~ <compound ...>
@@ -294,7 +294,7 @@ class Doxygen2BoostBook:
self._translate_index_(entries,target=self.section['index'])
self._translate_index_(classes,target=self.section['classes'])
return None
-
+
#~ Translate a set of index entries in the BoostBook output. The output
#~ is grouped into groups of the first letter of the entry names.
def _translate_index_(self, entries, target=None, **kwargs ):
@@ -314,12 +314,12 @@ class Doxygen2BoostBook:
'link',entries[i]['compoundname'],linkend=entries[i]['id']))
ie.appendChild(self.boostbook.createTextNode(')'))
i += 1
-
+
#~ Translate a <compounddef ...>...</compounddef>,
#~ by retranslating with the "kind" of compounddef.
def _translate_compounddef( self, node, target=None, **kwargs ):
return self._translateNode(node,node.getAttribute('kind'))
-
+
#~ Translate a <compounddef kind="namespace"...>...</compounddef>. For
#~ namespaces we just collect the information for later use as there is no
#~ currently namespaces are not included in the BoostBook format. In the future
@@ -343,12 +343,12 @@ class Doxygen2BoostBook:
self.symbols[namespace['name']] = namespace
#~ self._setID(namespace['id'],namespace['name'])
return None
-
+
#~ Translate a <compounddef kind="class"...>...</compounddef>, which
#~ forwards to the kind=struct as they are the same.
def _translate_compounddef_class( self, node, target=None, **kwargs ):
return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs)
-
+
#~ Translate a <compounddef kind="struct"...>...</compounddef> into:
#~ <header id="?" name="?">
#~ <struct name="?">
@@ -381,7 +381,7 @@ class Doxygen2BoostBook:
self._translateNode(n,target=struct,scope=compoundname['compoundname'])
result = struct
return result
-
+
#~ Translate a <compounddef ...><includes ...>...</includes></compounddef>,
def _translate_compounddef_includes_( self, node, target=None, **kwargs ):
name = node.firstChild.data
@@ -395,7 +395,7 @@ class Doxygen2BoostBook:
name=name)
}
return None
-
+
#~ Translate a <basecompoundref...>...</basecompoundref> into:
#~ <inherit access="?">
#~ ...
@@ -405,7 +405,7 @@ class Doxygen2BoostBook:
access=ref.getAttribute('prot')))
self._translateChildren(ref,target=inherit)
return
-
+
#~ Translate:
#~ <templateparamlist>
#~ <param>
@@ -448,7 +448,7 @@ class Doxygen2BoostBook:
value = self._getData(defval)
templateParam.appendChild(self._createText('default',value))
return template
-
+
#~ Translate:
#~ <briefdescription>...</briefdescription>
#~ Into:
@@ -456,37 +456,37 @@ class Doxygen2BoostBook:
def _translate_briefdescription( self, brief, target=None, **kwargs ):
self._translateDescription(brief,target=target,**kwargs)
return self._translateDescription(brief,target=target,tag='purpose',**kwargs)
-
+
#~ Translate:
#~ <detaileddescription>...</detaileddescription>
#~ Into:
#~ <description>...</description>
def _translate_detaileddescription( self, detailed, target=None, **kwargs ):
return self._translateDescription(detailed,target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="?">...</sectiondef>
#~ With kind specific translation.
def _translate_sectiondef( self, sectiondef, target=None, **kwargs ):
self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs)
-
+
#~ Translate non-function sections.
def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ):
for n in sectiondef.childNodes:
if hasattr(n,'getAttribute'):
self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
return None
-
+
#~ Translate:
#~ <sectiondef kind="public-type">...</sectiondef>
def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ):
return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="public-sttrib">...</sectiondef>
def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs):
return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="?-func">...</sectiondef>
#~ All the various function group translations end up here for which
@@ -500,31 +500,31 @@ class Doxygen2BoostBook:
if hasattr(n,'getAttribute'):
self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs)
return members
-
+
#~ Translate:
#~ <sectiondef kind="public-func">...</sectiondef>
def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ):
return self._translate_sectiondef_func_(sectiondef,
name='public member functions',target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="public-static-func">...</sectiondef>
def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs):
return self._translate_sectiondef_func_(sectiondef,
name='public static functions',target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="protected-func">...</sectiondef>
def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ):
return self._translate_sectiondef_func_(sectiondef,
name='protected member functions',target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="private-static-func">...</sectiondef>
def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs):
return self._translate_sectiondef_func_(sectiondef,
name='private static functions',target=target,**kwargs)
-
+
#~ Translate:
#~ <sectiondef kind="public-func">...</sectiondef>
def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ):
@@ -536,7 +536,7 @@ class Doxygen2BoostBook:
def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ):
return self._translate_sectiondef_func_(sectiondef,
name=self._getChildData('header', root=sectiondef),target=target,**kwargs)
-
+
#~ Translate:
#~ <memberdef kind="typedef" id="?">
#~ <name>...</name>
@@ -554,7 +554,7 @@ class Doxygen2BoostBook:
typedef_type = typedef.appendChild(self._createNode('type'))
self._translate_type(self._getChild('type',root=memberdef),target=typedef_type)
return typedef
-
+
#~ Translate:
#~ <memberdef kind="function" id="?" const="?" static="?" explicit="?" inline="?">
#~ <name>...</name>
@@ -594,13 +594,13 @@ class Doxygen2BoostBook:
for n in memberdef.childNodes:
self._translateNode(memberdef,'function',n,target=method)
return method
-
+
#~ Translate:
#~ <memberdef kind="function"...><templateparamlist>...</templateparamlist></memberdef>
def _translate_memberdef_function_templateparamlist(
self, templateparamlist, target=None, **kwargs ):
return self._translate_templateparamlist(templateparamlist,target=target,**kwargs)
-
+
#~ Translate:
#~ <memberdef kind="function"...><type>...</type></memberdef>
#~ To:
@@ -611,7 +611,7 @@ class Doxygen2BoostBook:
if methodType.hasChildNodes():
target.appendChild(methodType)
return methodType
-
+
#~ Translate:
#~ <memberdef kind="function"...><briefdescription>...</briefdescription></memberdef>
def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ):
@@ -621,22 +621,22 @@ class Doxygen2BoostBook:
## on the previous line, don't bother with the repetition.
# result = self._translateDescription(description,target=target,tag='purpose',**kwargs)
return result
-
+
#~ Translate:
#~ <memberdef kind="function"...><detaileddescription>...</detaileddescription></memberdef>
def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ):
return self._translateDescription(description,target=target,**kwargs)
-
+
#~ Translate:
#~ <memberdef kind="function"...><inbodydescription>...</inbodydescription></memberdef>
def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ):
return self._translateDescription(description,target=target,**kwargs)
-
+
#~ Translate:
#~ <memberdef kind="function"...><param>...</param></memberdef>
def _translate_memberdef_function_param( self, param, target=None, **kwargs ):
return self._translate_param(param,target=target,**kwargs)
-
+
#~ Translate:
#~ <memberdef kind="variable" id="?">
#~ <name>...</name>
@@ -654,7 +654,7 @@ class Doxygen2BoostBook:
name=self._getChildData('name',root=memberdef)))
data_member_type = data_member.appendChild(self._createNode('type'))
self._translate_type(self._getChild('type',root=memberdef),target=data_member_type)
-
+
#~ Translate:
#~ <memberdef kind="enum" id="?">
#~ <name>...</name>
@@ -673,7 +673,7 @@ class Doxygen2BoostBook:
for n in memberdef.childNodes:
self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs)
return enum
-
+
#~ Translate:
#~ <memberdef kind="enum"...>
#~ <enumvalue id="?">
@@ -696,7 +696,7 @@ class Doxygen2BoostBook:
self._translateChildren(initializer,
target=target.appendChild(self._createNode('default')))
return value
-
+
#~ Translate:
#~ <param>
#~ <type>...</type>
@@ -717,12 +717,12 @@ class Doxygen2BoostBook:
if defval:
self._translateChildren(self._getChild('defval',root=param),target=parameter)
return parameter
-
+
#~ Translate:
#~ <ref kindref="?" ...>...</ref>
def _translate_ref( self, ref, **kwargs ):
return self._translateNode(ref,ref.getAttribute('kindref'))
-
+
#~ Translate:
#~ <ref refid="?" kindref="compound">...</ref>
#~ To:
@@ -732,7 +732,7 @@ class Doxygen2BoostBook:
classname = result.appendChild(self._createNode('classname'))
self._translateChildren(ref,target=classname)
return result
-
+
#~ Translate:
#~ <ref refid="?" kindref="member">...</ref>
#~ To:
@@ -741,7 +741,7 @@ class Doxygen2BoostBook:
result = self._createNode('link',linkend=ref.getAttribute('refid'))
self._translateChildren(ref,target=result)
return result
-
+
#~ Translate:
#~ <type>...</type>
def _translate_type( self, type, target=None, **kwargs ):
@@ -758,7 +758,7 @@ class Doxygen2BoostBook:
target.removeChild(target.firstChild)
target.appendChild(self._createText('emphasis','unspecified'))
return result
-
+
def _getChild( self, tag = None, id = None, name = None, root = None ):
if not root:
root = self.boostbook.documentElement
@@ -777,17 +777,17 @@ class Doxygen2BoostBook:
#~ print '--|', n
return n
return None
-
+
def _getChildData( self, tag, **kwargs ):
return self._getData(self._getChild(tag,**kwargs),**kwargs)
-
+
def _getData( self, node, **kwargs ):
if node:
text = self._getChild('#text',root=node)
if text:
return text.data.strip()
return ''
-
+
def _cppName( self, type ):
parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':'))
result = {
@@ -806,7 +806,7 @@ class Doxygen2BoostBook:
result['name'] = result['namespace'].pop()+'::'+result['name']
namespace = '::'.join(result['namespace'])
return result
-
+
def _createNode( self, tag, **kwargs ):
result = self.boostbook.createElement(tag)
for k in kwargs.keys():
@@ -816,7 +816,7 @@ class Doxygen2BoostBook:
else:
result.setAttribute(k,kwargs[k])
return result
-
+
def _createText( self, tag, data, **kwargs ):
result = self._createNode(tag,**kwargs)
data = data.strip()
@@ -827,7 +827,7 @@ class Doxygen2BoostBook:
def main( xmldir=None, output=None, id=None, title=None, index=False ):
#~ print '--- main: xmldir = %s, output = %s' % (xmldir,output)
-
+
input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) )
input.sort
translator = Doxygen2BoostBook(id=id, title=title, index=index)
@@ -847,7 +847,7 @@ def main( xmldir=None, output=None, id=None, title=None, index=False ):
for dox in decl_files:
#~ print '--|',os.path.basename(dox)
translator.addDox(xml.dom.minidom.parse(dox))
-
+
if output:
output = open(output,'w')
else:
diff --git a/tools/build/src/tools/gcc.py b/tools/build/src/tools/gcc.py
index d2d3294380..bc810e489f 100644
--- a/tools/build/src/tools/gcc.py
+++ b/tools/build/src/tools/gcc.py
@@ -38,7 +38,7 @@ __debug = None
def debug():
global __debug
if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
feature.extend('toolset', ['gcc'])
@@ -333,7 +333,7 @@ flags('gcc.compile', 'INCLUDES', [], ['<include>'])
engine = get_manager().engine()
-engine.register_action('gcc.compile.c++.pch',
+engine.register_action('gcc.compile.c++.pch',
'"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
engine.register_action('gcc.compile.c.pch',
@@ -360,7 +360,7 @@ def gcc_compile_c(targets, sources, properties):
engine.set_target_variable (targets, 'LANG', '-x c')
#}
engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
-
+
engine.register_action(
'gcc.compile.c++',
'"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
diff --git a/tools/build/src/tools/intel-win.jam b/tools/build/src/tools/intel-win.jam
index bccdb1fa3a..d3d58fc6db 100644
--- a/tools/build/src/tools/intel-win.jam
+++ b/tools/build/src/tools/intel-win.jam
@@ -474,6 +474,7 @@ if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
.iclvars-13.0-supported-vcs = "11.0 10.0 9.0" ;
.iclvars-14.0-supported-vcs = "12.0 11.0 10.0 9.0" ;
.iclvars-15.0-supported-vcs = "12.0 11.0 10.0 9.0" ;
+.iclvars-version-alias-vc14 = vs2015 ;
.iclvars-version-alias-vc12 = vs2013 ;
.iclvars-version-alias-vc11 = vs2012 ;
.iclvars-version-alias-vc10 = vs2010 ;
diff --git a/tools/build/src/tools/make.py b/tools/build/src/tools/make.py
index 10baa1cb41..716a561197 100644
--- a/tools/build/src/tools/make.py
+++ b/tools/build/src/tools/make.py
@@ -1,12 +1,12 @@
# Status: ported.
# Base revision: 64068
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Douglas Gregor
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This module defines the 'make' main target rule.
@@ -18,13 +18,13 @@ import b2.build.property_set
class MakeTarget(BasicTarget):
-
+
def construct(self, name, source_targets, property_set):
- action_name = property_set.get("<action>")[0]
+ action_name = property_set.get("<action>")[0]
action = Action(get_manager(), source_targets, action_name[1:], property_set)
target = FileTarget(self.name(), type.type(self.name()),
- self.project(), action, exact=True)
+ self.project(), action, exact=True)
return [ b2.build.property_set.empty(),
[self.project().manager().virtual_targets().register(target)]]
@@ -39,9 +39,9 @@ def make (target_name, sources, generating_rule,
if not requirements:
requirements = []
-
+
requirements.append("<action>%s" % generating_rule)
-
+
m = get_manager()
targets = m.targets()
project = m.projects().current()
diff --git a/tools/build/src/tools/mc.py b/tools/build/src/tools/mc.py
index c194acdff7..9992c36c19 100644
--- a/tools/build/src/tools/mc.py
+++ b/tools/build/src/tools/mc.py
@@ -7,7 +7,7 @@
# Support for Microsoft message compiler tool.
# Notes:
-# - there's just message compiler tool, there's no tool for
+# - there's just message compiler tool, there's no tool for
# extracting message strings from sources
# - This file allows to use Microsoft message compiler
# with any toolset. In msvc.jam, there's more specific
diff --git a/tools/build/src/tools/message.py b/tools/build/src/tools/message.py
index cc0b946ff1..5ec3efc768 100644
--- a/tools/build/src/tools/message.py
+++ b/tools/build/src/tools/message.py
@@ -26,7 +26,7 @@ class MessageTargetClass(targets.BasicTarget):
if not self.built:
for arg in self.args:
if type(arg) == type([]):
- arg = " ".join(arg)
+ arg = " ".join(arg)
print arg
self.built = True
@@ -38,9 +38,9 @@ def message(name, *args):
name = name[0]
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(MessageTargetClass(*((name, project) + args)))
get_manager().projects().add_rule("message", message)
diff --git a/tools/build/src/tools/midl.py b/tools/build/src/tools/midl.py
index 86c1f34b6c..51bc51feb8 100644
--- a/tools/build/src/tools/midl.py
+++ b/tools/build/src/tools/midl.py
@@ -19,7 +19,7 @@ def init():
type.register('IDL', ['idl'])
# A type library (.tlb) is generated by MIDL compiler and can be included
-# to resources of an application (.rc). In order to be found by a resource
+# to resources of an application (.rc). In order to be found by a resource
# compiler its target type should be derived from 'H' - otherwise
# the property '<implicit-dependency>' will be ignored.
type.register('MSTYPELIB', ['tlb'], 'H')
@@ -30,10 +30,10 @@ class MidlScanner(scanner.Scanner):
scanner.Scanner.__init__(self)
self.includes = includes
- # List of quoted strings
+ # List of quoted strings
re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
- # 'import' and 'importlib' directives
+ # 'import' and 'importlib' directives
self.re_import = "import" + re_strings + "[ \t]*;" ;
self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;" ;
@@ -58,8 +58,8 @@ class MidlScanner(scanner.Scanner):
# Attach binding of including file to included targets.
# When target is directly created from virtual target
# this extra information is unnecessary. But in other
- # cases, it allows to distinguish between two headers of the
- # same name included from different places.
+ # cases, it allows to distinguish between two headers of the
+ # same name included from different places.
g2 = g + "#" + b
g = "<" + g + ">"
@@ -79,7 +79,7 @@ class MidlScanner(scanner.Scanner):
engine.set_target_variable(included_quoted, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
engine.set_target_variable(imported , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
engine.set_target_variable(imported_tlbs , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
-
+
get_manager().scanners().propagate(type.get_scanner('CPP', PropertySet(self.includes)), included_angle + included_quoted)
get_manager().scanners().propagate(self, imported)
@@ -113,9 +113,9 @@ flags('midl.compile.idl', 'INCLUDES', [], ['<include>'])
builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], [])
-# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures
-# that both files will be created so bjam will not try to recreate them
+# that both files will be created so bjam will not try to recreate them
# constantly.
get_manager().engine().register_action(
'midl.compile.idl',
@@ -130,5 +130,5 @@ $(MIDLFLAGS)
/iid "$(<[3]:W)"
/proxy "$(<[4]:W)"
/dlldata "$(<[5]:W)")"
-{touch} "$(<[4]:W)"
+{touch} "$(<[4]:W)"
{touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
diff --git a/tools/build/src/tools/msvc.jam b/tools/build/src/tools/msvc.jam
index 09d7edbc33..f3561db205 100644
--- a/tools/build/src/tools/msvc.jam
+++ b/tools/build/src/tools/msvc.jam
@@ -1161,7 +1161,14 @@ local rule configure-really ( version ? : options * )
{
setup-script = $(setup-phone-$(c)) ;
}
- toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 -nologo ;
+ if $(api) = desktop
+ {
+ toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 -nologo ;
+ }
+ else
+ {
+ toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 /ZW /EHsc -nologo ;
+ }
toolset.flags msvc.compile .ASM <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(cpu-assembler) -nologo ;
toolset.flags msvc.link .LD <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(linker) /NOLOGO /INCREMENTAL:NO ;
toolset.flags msvc.archive .LD <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(linker) /lib /NOLOGO ;
diff --git a/tools/build/src/tools/msvc.py b/tools/build/src/tools/msvc.py
index 02dce9f9ed..8cdc273e0e 100644
--- a/tools/build/src/tools/msvc.py
+++ b/tools/build/src/tools/msvc.py
@@ -44,15 +44,15 @@ __debug = None
def debug():
global __debug
if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
-# It is not yet clear what to do with Cygwin on python port.
+# It is not yet clear what to do with Cygwin on python port.
def on_cygwin():
return False
-
+
type.register('MANIFEST', ['manifest'])
feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated']) ;
@@ -113,7 +113,7 @@ def init(version = None, command = None, options = None):
options = to_seq(options)
command = to_seq(command)
-
+
if command:
options.extend("<command>"+cmd for cmd in command)
configure(version,options)
@@ -122,7 +122,7 @@ def configure(version=None, options=None):
if version == "all":
if options:
raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version))
-
+
# Configure (i.e. mark as used) all registered versions.
all_versions = __versions.all()
if not all_versions:
@@ -145,7 +145,7 @@ def configure(version=None, options=None):
def extend_conditions(conditions,exts):
return [ cond + '/' + ext for cond in conditions for ext in exts ]
-
+
def configure_version_specific(toolset_arg, version, conditions):
# Starting with versions 7.0, the msvc compiler have the /Zc:forScope and
# /Zc:wchar_t options that improve C++ standard conformance, but those
@@ -204,7 +204,7 @@ def configure_version_specific(toolset_arg, version, conditions):
toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64'])
toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86'])
toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64'])
-
+
# Make sure that manifest will be generated even if there is no
# dependencies to put there.
toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', conditions, ['/MANIFEST'])
@@ -216,12 +216,12 @@ def configure_version_specific(toolset_arg, version, conditions):
def register_toolset():
if not 'msvc' in feature.values('toolset'):
register_toolset_really()
-
-
+
+
engine = get_manager().engine()
-# this rule sets up the pdb file that will be used when generating static
-# libraries and the debug-store option is database, so that the compiler
+# this rule sets up the pdb file that will be used when generating static
+# libraries and the debug-store option is database, so that the compiler
# puts all debug info into a single .pdb file named after the library
#
# Poking at source targets this way is probably not clean, but it's the
@@ -252,7 +252,7 @@ $(LIBRARIES_MENTIONED_BY_FILE)
"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()),
function=archive)
-
+
# For the assembler the following options are turned on by default:
#
# -Zp4 align structures to 4 bytes
@@ -319,10 +319,10 @@ def compile_cpp_pch(targets,sources=[],properties=None):
#
# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
#
-# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++.
+# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++.
# The linker will pull these into the executable's PDB
#
-# 3. When compiling library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
+# 3. When compiling library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
# as in this case the compiler must be used to create a single PDB for our library.
#
@@ -330,7 +330,7 @@ class SetupAction:
def __init__(self, setup_func, function):
self.setup_func = setup_func
self.function = function
-
+
def __call__(self, targets, sources, property_set):
assert(callable(self.setup_func))
# This can modify sources.
@@ -378,7 +378,7 @@ def setup_preprocess_c_cpp_action(targets, sources, properties):
sources += bjam.call('get-target-variable',targets,'PCH_FILE')
sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
return 'preprocess-c-c++'
-
+
register_setup_action(
'msvc.preprocess.c',
setup_preprocess_c_cpp_action,
@@ -436,7 +436,7 @@ register_setup_action(
engine.register_action(
'msvc.compile.idl',
'''$(.IDL) /nologo @"@($(<[1]:W).rsp:E=
-"$(>:W)"
+"$(>:W)"
-D$(DEFINES)
"-I$(INCLUDES:W)"
-U$(UNDEFS)
@@ -506,7 +506,7 @@ $(LIBRARIES)
if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
function=link_dll,
bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
-
+
engine.register_action(
'msvc.manifest.dll',
'''if exist "$(<[1]).manifest" (
@@ -540,7 +540,7 @@ $(LIBRARIES)
"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
function=link_dll,
bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
-
+
engine.register_action(
'msvc.manifest.dll',
'''if test -e "$(<[1]).manifest"; then
@@ -566,7 +566,7 @@ class MsvcPchGenerator(pch.PchGenerator):
pch_header = s
elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'):
pch_source = s
-
+
if not pch_header:
raise RuntimeError( "can not build pch without pch-header" )
@@ -589,7 +589,7 @@ class MsvcPchGenerator(pch.PchGenerator):
result_props.append(Property('pch-header', pch_header))
if pch_file:
result_props.append(Property('pch-file', pch_file))
-
+
return property_set.PropertySet(result_props), generated
@@ -635,7 +635,7 @@ def configure_really(version=None, options=[]):
# Take the first registered (i.e. auto-detected) version.
version = __versions.first()
v = version
-
+
# Note: 'version' can still be empty at this point if no versions have
# been auto-detected.
if not version:
@@ -665,12 +665,12 @@ def configure_really(version=None, options=[]):
conditions = common.check_init_parameters('msvc', None, ('version', v))
__versions.set(version, 'conditions', conditions)
command = feature.get_values('<command>', options)
-
+
# If version is specified, we try to search first in default paths, and
# only then in PATH.
command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version))
common.handle_options('msvc', conditions, command, options)
-
+
if not version:
# Even if version is not explicitly specified, try to detect the
# version from the path.
@@ -718,7 +718,7 @@ def configure_really(version=None, options=[]):
# MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
# toolset version.
command = common.get_absolute_tool_path(command)
-
+
if command:
parent = os.path.dirname(os.path.normpath(command))
# Setup will be used if the command name has been specified. If
@@ -834,7 +834,7 @@ def configure_really(version=None, options=[]):
default_assembler_ia64 = 'ias'
assembler = feature.get_values('<assembler>',options)
-
+
idl_compiler = feature.get_values('<idl-compiler>',options)
if not idl_compiler:
idl_compiler = 'midl'
@@ -851,7 +851,7 @@ def configure_really(version=None, options=[]):
for c in cpu:
cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ]
-
+
setup_script = setup_scripts.get(c, '')
if debug():
@@ -934,7 +934,7 @@ class MsvcLinkingGenerator(builtin.LinkingGenerator):
if result:
name_main = result[0].name()
action = result[0].action()
-
+
if prop_set.get('<debug-symbols>') == 'on':
# We force exact name on PDB. The reason is tagging -- the tag rule may
# reasonably special case some target types, like SHARED_LIB. The tag rule
@@ -947,7 +947,7 @@ class MsvcLinkingGenerator(builtin.LinkingGenerator):
action.replace_targets(target,registered_target)
result.append(registered_target)
if prop_set.get('<embed-manifest>') == 'off':
- # Manifest is evil target. It has .manifest appened to the name of
+ # Manifest is evil target. It has .manifest appened to the name of
# main target, including extension. E.g. a.exe.manifest. We use 'exact'
# name because to achieve this effect.
target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True)
@@ -1151,14 +1151,14 @@ class MSVCConfigurations(Configurations):
def first(self):
return self.first_
-
+
# List of all registered configurations.
__versions = MSVCConfigurations()
# Supported CPU architectures.
__cpu_arch_i386 = [
- '<architecture>/<address-model>',
+ '<architecture>/<address-model>',
'<architecture>/<address-model>32',
'<architecture>x86/<address-model>',
'<architecture>x86/<address-model>32']
diff --git a/tools/build/src/tools/package.py b/tools/build/src/tools/package.py
index aa081b4f49..a3b1baef47 100644
--- a/tools/build/src/tools/package.py
+++ b/tools/build/src/tools/package.py
@@ -68,14 +68,14 @@ def install(name, package_name=None, requirements=[], binaries=[], libraries=[],
option.set("bindir", None)
option.set("libdir", None)
option.set("includedir", None)
-
+
# If <install-source-root> is not specified, all headers are installed to
# prefix/include, no matter what their relative path is. Sometimes that is
# what is needed.
install_source_root = property.select('install-source-root', requirements)
if install_source_root:
requirements = property.change(requirements, 'install-source-root', None)
-
+
install_header_subdir = property.select('install-header-subdir', requirements)
if install_header_subdir:
install_header_subdir = ungrist(install_header_subdir[0])
@@ -98,16 +98,16 @@ def install(name, package_name=None, requirements=[], binaries=[], libraries=[],
include_locate = option.get("includedir", os.path.join(prefix, "include"))
stage.install(name + "-bin", binaries, requirements + ["<location>" + bin_locate])
-
+
alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"])
-
+
# Since the install location of shared libraries differs on universe
# and cygwin, use target alternatives to make different targets.
# We should have used indirection conditioanl requirements, but it's
# awkward to pass bin-locate and lib-locate from there to another rule.
alias(name + "-lib-shared", [name + "-lib-shared-universe"])
alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["<target-os>cygwin"])
-
+
# For shared libraries, we install both explicitly specified one and the
# shared libraries that the installed executables depend on.
stage.install(name + "-lib-shared-universe", binaries + libraries,
@@ -141,7 +141,7 @@ def install_data(target_name, package_name, data, requirements):
# If --prefix is explicitly specified on the command line,
# then we need wipe away any settings of datarootdir
option.set("datarootdir", None)
-
+
prefix = get_prefix(package_name, requirements)
datadir = option.get("datarootdir", os.path.join(prefix, "share"))
@@ -156,7 +156,7 @@ def get_prefix(package_name, requirements):
if specified:
specified = ungrist(specified[0])
prefix = option.get("prefix", specified)
- requirements = property.change(requirements, "install-default-prefix", None)
+ requirements = property.change(requirements, "install-default-prefix", None)
# Or some likely defaults if neither is given.
if not prefix:
if os.name == "nt":
diff --git a/tools/build/src/tools/rc.py b/tools/build/src/tools/rc.py
index d026480d85..5bdebb9be2 100644
--- a/tools/build/src/tools/rc.py
+++ b/tools/build/src/tools/rc.py
@@ -5,7 +5,7 @@
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
-#
+#
# Copyright (c) 2006 Rene Rivera.
#
# Copyright (c) 2008 Steven Watanabe
@@ -39,7 +39,7 @@ __debug = None
def debug():
global __debug
if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
type.register('RC', ['rc'])
@@ -51,10 +51,10 @@ def configure (command = None, condition = None, options = None):
"""
Configures a new resource compilation command specific to a condition,
usually a toolset selection condition. The possible options are:
-
+
* <rc-type>(rc|windres) - Indicates the type of options the command
accepts.
-
+
Even though the arguments are all optional, only when a command, condition,
and at minimum the rc-type option are given will the command be configured.
This is so that callers don't have to check auto-configuration values
@@ -79,11 +79,11 @@ engine = get_manager().engine()
class RCAction:
"""Class representing bjam action defined from Python.
The function must register the action to execute."""
-
+
def __init__(self, action_name, function):
self.action_name = action_name
self.function = function
-
+
def __call__(self, targets, sources, property_set):
if self.function:
self.function(targets, sources, property_set)
@@ -132,7 +132,7 @@ __angle_include_re = "#include[ ]*<([^<]+)>"
# Register scanner for resources
class ResScanner(scanner.Scanner):
-
+
def __init__(self, includes):
scanner.__init__ ;
self.includes = includes
@@ -149,7 +149,7 @@ class ResScanner(scanner.Scanner):
"[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
"[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])
- # Icons and other includes may referenced as
+ # Icons and other includes may referenced as
#
# IDR_MAINFRAME ICON "res\\icon.ico"
#
@@ -163,19 +163,19 @@ class ResScanner(scanner.Scanner):
# Attach binding of including file to included targets.
# When target is directly created from virtual target
# this extra information is unnecessary. But in other
- # cases, it allows to distinguish between two headers of the
- # same name included from different places.
+ # cases, it allows to distinguish between two headers of the
+ # same name included from different places.
# We don't need this extra information for angle includes,
# since they should not depend on including file (we can't
# get literal "." in include path).
g2 = g + "#" + b
-
+
g = "<" + g + ">"
g2 = "<" + g2 + ">"
angle = [g + x for x in angle]
quoted = [g2 + x for x in quoted]
res = [g2 + x for x in res]
-
+
all = angle + quoted
bjam.call('mark-included', target, all)
@@ -187,7 +187,7 @@ class ResScanner(scanner.Scanner):
engine.set_target_variable(angle, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
engine.set_target_variable(quoted, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
engine.set_target_variable(res, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
-
+
# Just propagate current scanner to includes, in a hope
# that includes do not change scanners.
get_manager().scanners().propagate(self, angle + quoted)
diff --git a/tools/build/src/tools/stage.py b/tools/build/src/tools/stage.py
index 8eda4e2585..76b10f65a6 100644
--- a/tools/build/src/tools/stage.py
+++ b/tools/build/src/tools/stage.py
@@ -54,7 +54,7 @@ class InstallTargetClass(targets.BasicTarget):
if a:
ps = a.properties()
properties = ps.all()
-
+
# Unless <hardcode-dll-paths>true is in properties, which can happen
# only if the user has explicitly requested it, nuke all <dll-path>
# properties.
@@ -80,7 +80,7 @@ class InstallTargetClass(targets.BasicTarget):
properties.extend(build_ps.get_properties('dependency'))
properties.extend(build_ps.get_properties('location'))
-
+
properties.extend(build_ps.get_properties('install-no-version-symlinks'))
@@ -93,7 +93,7 @@ class InstallTargetClass(targets.BasicTarget):
properties.append(property.Property(p.feature(), os.path.abspath(p.value())))
return property_set.create(properties)
-
+
def construct(self, name, source_targets, ps):
@@ -125,7 +125,7 @@ class InstallTargetClass(targets.BasicTarget):
new_ps, [i])
assert isinstance(r, property_set.PropertySet)
staged_targets.extend(targets)
-
+
else:
staged_targets.append(copy_file(self.project(), ename, i, new_ps))
@@ -168,18 +168,18 @@ class InstallTargetClass(targets.BasicTarget):
# CONSIDER: figure out why we can not use virtual-target.traverse here.
#
def collect_targets(self, targets):
-
+
s = [t.creating_subvariant() for t in targets]
s = unique(filter(lambda l: l != None,s))
-
+
result = set(targets)
for i in s:
i.all_referenced_targets(result)
-
+
result2 = []
for r in result:
if isinstance(r, property.Property):
-
+
if r.feature().name() != 'use':
result2.append(r.value())
else:
@@ -284,7 +284,7 @@ class InstalledSharedLibGenerator(generators.Generator):
else:
need_relink = ps.get('dll-path') != source.action().properties().get('dll-path')
-
+
if need_relink:
# Rpath changed, need to relink.
copied = relink_file(project, source, ps)
@@ -308,13 +308,13 @@ class InstalledSharedLibGenerator(generators.Generator):
# compatibility guarantees. If not, it is possible to skip those
# symlinks.
if ps.get('install-no-version-symlinks') != ['on']:
-
+
result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps))
result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3),
project, copied, ps))
return result
-
+
generators.register(InstalledSharedLibGenerator())
@@ -335,9 +335,9 @@ def install(name, sources, requirements=[], default_build=[], usage_requirements
from b2.manager import get_manager
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
InstallTargetClass(name, project,
t.main_target_sources(sources, name),
diff --git a/tools/build/src/tools/symlink.py b/tools/build/src/tools/symlink.py
index 6345ded6d3..ed53889770 100644
--- a/tools/build/src/tools/symlink.py
+++ b/tools/build/src/tools/symlink.py
@@ -1,11 +1,11 @@
# Status: ported.
# Base revision: 64488.
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Defines the "symlink" special target. 'symlink' targets make symbolic links
# to the sources.
@@ -30,18 +30,18 @@ class SymlinkTarget(targets.BasicTarget):
_count = 0
def __init__(self, project, targets, sources):
-
+
# Generate a fake name for now. Need unnamed targets eventually.
fake_name = "symlink#%s" % SymlinkTarget._count
SymlinkTarget._count = SymlinkTarget._count + 1
b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources)
-
+
# Remember the targets to map the sources onto. Pad or truncate
# to fit the sources given.
assert len(targets) <= len(sources)
self.targets = targets[:] + sources[len(targets):]
-
+
# The virtual targets corresponding to the given targets.
self.virtual_targets = []
@@ -51,7 +51,7 @@ class SymlinkTarget(targets.BasicTarget):
s = self.targets[i]
a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps)
vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a)
-
+
# Place the symlink in the directory relative to the project
# location, instead of placing it in the build directory.
if not ps.get('symlink-location') == "project-relative":
@@ -80,11 +80,11 @@ class SymlinkTarget(targets.BasicTarget):
def symlink(targets, sources):
from b2.manager import get_manager
- t = get_manager().targets()
+ t = get_manager().targets()
p = get_manager().projects().current()
return t.main_target_alternative(
- SymlinkTarget(p, targets,
+ SymlinkTarget(p, targets,
# Note: inline targets are not supported for symlink, intentionally,
# since it's used to linking existing non-local targets.
sources))
diff --git a/tools/build/src/tools/testing.py b/tools/build/src/tools/testing.py
index a3b3f01174..868905a05b 100644
--- a/tools/build/src/tools/testing.py
+++ b/tools/build/src/tools/testing.py
@@ -45,7 +45,7 @@ import b2.build_system as build_system
from b2.manager import get_manager
-from b2.util import stem, bjam_signature
+from b2.util import stem, bjam_signature, is_iterable_typed
from b2.util.sequence import unique
import bjam
@@ -88,7 +88,10 @@ __all_tests = []
# Helper rule. Create a test target, using basename of first source if no target
# name is explicitly passed. Remembers the created target in a global variable.
def make_test(target_type, sources, requirements, target_name=None):
-
+ assert isinstance(target_type, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert isinstance(target_type, basestring) or target_type is None
if not target_name:
target_name = stem(os.path.basename(sources[0]))
@@ -151,7 +154,7 @@ def handle_input_files(input_files):
@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
["requirements", "*"], ["target_name", "?"],
- ["default_build", "*"]))
+ ["default_build", "*"]))
def run(sources, args, input_files, requirements, target_name=None, default_build=[]):
if args:
requirements.append("<testing.arg>" + " ".join(args))
@@ -160,7 +163,7 @@ def run(sources, args, input_files, requirements, target_name=None, default_buil
@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
["requirements", "*"], ["target_name", "?"],
- ["default_build", "*"]))
+ ["default_build", "*"]))
def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]):
if args:
requirements.append("<testing.arg>" + " ".join(args))
@@ -189,7 +192,8 @@ __ln1 = re.compile("/(tools|libs)/(.*)/(test|example)")
__ln2 = re.compile("/(tools|libs)/(.*)$")
__ln3 = re.compile("(/status$)")
def get_library_name(path):
-
+ assert isinstance(path, basestring)
+
path = path.replace("\\", "/")
match1 = __ln1.match(path)
match2 = __ln2.match(path)
@@ -216,6 +220,7 @@ __out_xml = option.get("out-xml", False, True)
# - relative location of all source from the project root.
#
def dump_test(target):
+ assert isinstance(target, targets.AbstractTarget)
type = target.type()
name = target.name()
project = target.project()
@@ -298,7 +303,11 @@ generators.register_composing("testing.time", [], ["TIME"])
# contained in testing-aux.jam, which we load into Jam module named 'testing'
def run_path_setup(target, sources, ps):
-
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert is_iterable_typed(target, basestring) or isinstance(target, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(ps, PropertySet)
# For testing, we need to make sure that all dynamic libraries needed by the
# test are found. So, we collect all paths from dependency libraries (via
# xdll-path property) and add whatever explicit dll-path user has specified.
@@ -313,7 +322,12 @@ def run_path_setup(target, sources, ps):
common.shared_library_path_variable(), dll_paths))
def capture_output_setup(target, sources, ps):
- run_path_setup(target, sources, ps)
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert is_iterable_typed(target, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(ps, PropertySet)
+ run_path_setup(target[0], sources, ps)
if ps.get('preserve-test-targets') == ['off']:
bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1")
diff --git a/tools/build/src/tools/unix.py b/tools/build/src/tools/unix.py
index 34758f57b5..681a872027 100644
--- a/tools/build/src/tools/unix.py
+++ b/tools/build/src/tools/unix.py
@@ -15,17 +15,17 @@ from b2.util.utility import *
from b2.util import set, sequence
class UnixLinkingGenerator (builtin.LinkingGenerator):
-
+
def __init__ (self, id, composing, source_types, target_types, requirements):
builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements)
-
+
def run (self, project, name, prop_set, sources):
result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources)
if result:
set_library_order (project.manager (), sources, prop_set, result [1])
-
+
return result
-
+
def generated_targets (self, sources, prop_set, project, name):
sources2 = []
libraries = []
@@ -35,34 +35,34 @@ class UnixLinkingGenerator (builtin.LinkingGenerator):
else:
sources2.append (l)
-
+
sources = sources2 + order_libraries (libraries)
-
+
return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name)
class UnixArchiveGenerator (builtin.ArchiveGenerator):
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
+
def run (self, project, name, prop_set, sources):
result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources)
set_library_order(project.manager(), sources, prop_set, result)
return result
class UnixSearchedLibGenerator (builtin.SearchedLibGenerator):
-
+
def __init__ (self):
builtin.SearchedLibGenerator.__init__ (self)
-
+
def optional_properties (self):
return self.requirements ()
-
+
def run (self, project, name, prop_set, sources):
result = SearchedLibGenerator.run (project, name, prop_set, sources)
-
+
set_library_order (sources, prop_set, result)
-
+
return result
class UnixPrebuiltLibGenerator (generators.Generator):
@@ -86,21 +86,21 @@ generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB
### # Declare generators
-### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE
+### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE
### : <toolset>unix ] ;
generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['<toolset>unix']))
-### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB
+### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB
### : <toolset>unix ] ;
-###
-### generators.register [ new UnixSearchedLibGenerator
+###
+### generators.register [ new UnixSearchedLibGenerator
### unix.SearchedLibGenerator : : SEARCHED_LIB : <toolset>unix ] ;
-###
-###
+###
+###
### # The derived toolset must specify their own actions.
### actions link {
### }
-###
+###
### actions link.dll {
### }
@@ -110,9 +110,9 @@ def unix_archive (manager, targets, sources, properties):
# FIXME: restore?
#action.register ('unix.archive', unix_archive, [''])
-### actions searched-lib-generator {
+### actions searched-lib-generator {
### }
-###
+###
### actions prebuilt {
### }
@@ -141,10 +141,10 @@ def set_library_order (manager, sources, prop_set, result):
for l in result:
if l.type () and type.is_derived (l.type (), 'LIB'):
created_libraries.append (l)
-
+
created_libraries = set.difference (created_libraries, used_libraries)
set_library_order_aux (created_libraries, used_libraries)
def order_libraries (libraries):
return __order.order (libraries)
-
+
diff --git a/tools/build/src/util/__init__.py b/tools/build/src/util/__init__.py
index e6a277af77..7c847cb577 100644
--- a/tools/build/src/util/__init__.py
+++ b/tools/build/src/util/__init__.py
@@ -6,14 +6,153 @@ import types
from itertools import groupby
+def safe_isinstance(value, types=None, class_names=None):
+ """To prevent circular imports, this extends isinstance()
+ by checking also if `value` has a particular class name (or inherits from a
+ particular class name). This check is safe in that an AttributeError is not
+ raised in case `value` doesn't have a __class__ attribute.
+ """
+ # inspect is being imported here because I seriously doubt
+ # that this function will be used outside of the type
+ # checking below.
+ import inspect
+ result = False
+ if types is not None:
+ result = result or isinstance(value, types)
+ if class_names is not None and not result:
+ # this doesn't work with inheritance, but normally
+ # either the class will already be imported within the module,
+ # or the class doesn't have any subclasses. For example: PropertySet
+ if isinstance(class_names, basestring):
+ class_names = [class_names]
+ # this is the part that makes it "safe".
+ try:
+ base_names = [class_.__name__ for class_ in inspect.getmro(value.__class__)]
+ for name in class_names:
+ if name in base_names:
+ return True
+ except AttributeError:
+ pass
+ return result
+
+
+def is_iterable_typed(values, type_):
+ return is_iterable(values) and all(isinstance(v, type_) for v in values)
+
+
+def is_iterable(value):
+ """Returns whether value is iterable and not a string."""
+ return not isinstance(value, basestring) and hasattr(value, '__iter__')
+
+
+def is_iterable_or_none(value):
+ return is_iterable(value) or value is None
+
+
+def is_single_value(value):
+ # some functions may specify a bjam signature
+ # that is a string type, but still allow a
+ # PropertySet to be passed in
+ return safe_isinstance(value, (basestring, type(None)), 'PropertySet')
+
+
+if __debug__:
+
+ from textwrap import dedent
+ message = dedent(
+ """The parameter "{}" was passed in a wrong type for the "{}()" function.
+ Actual:
+ \ttype: {}
+ \tvalue: {}
+ Expected:
+ \t{}
+ """
+ )
+
+ bjam_types = {
+ '*': is_iterable_or_none,
+ '+': is_iterable_or_none,
+ '?': is_single_value,
+ '': is_single_value,
+ }
+
+ bjam_to_python = {
+ '*': 'iterable',
+ '+': 'iterable',
+ '?': 'single value',
+ '': 'single value',
+ }
+
+
+ def get_next_var(field):
+ it = iter(field)
+ var = it.next()
+ type_ = None
+ yield_var = False
+ while type_ not in bjam_types:
+ try:
+ # the first value has already
+ # been consumed outside of the loop
+ type_ = it.next()
+ except StopIteration:
+ # if there are no more values, then
+ # var still needs to be returned
+ yield_var = True
+ break
+ if type_ not in bjam_types:
+ # type_ is not a type and is
+ # another variable in the same field.
+ yield var, ''
+ # type_ is the next var
+ var = type_
+ else:
+ # otherwise, type_ is a type for var
+ yield var, type_
+ try:
+ # the next value should be a var
+ var = it.next()
+ except StopIteration:
+ # if not, then we're done with
+ # this field
+ break
+ if yield_var:
+ yield var, ''
+
+
# Decorator the specifies bjam-side prototype for a Python function
def bjam_signature(s):
+ if __debug__:
+ from inspect import getcallargs
+ def decorator(fn):
+ function_name = fn.__module__ + '.' + fn.__name__
+ def wrapper(*args, **kwargs):
+ callargs = getcallargs(fn, *args, **kwargs)
+ for field in s:
+ for var, type_ in get_next_var(field):
+ try:
+ value = callargs[var]
+ except KeyError:
+ raise Exception(
+ 'Bjam Signature specifies a variable named "{}"\n'
+ 'but is not found within the python function signature\n'
+ 'for function {}()'.format(var, function_name)
+ )
+ if not bjam_types[type_](value):
+ raise TypeError(
+ message.format(var, function_name, type(type_), repr(value),
+ bjam_to_python[type_])
+ )
+ return fn(*args, **kwargs)
+ wrapper.__name__ = fn.__name__
+ wrapper.bjam_signature = s
+ return wrapper
+ return decorator
+ else:
+ def decorator(f):
+ f.bjam_signature = s
+ return f
- def wrap(f):
- f.bjam_signature = s
- return f
-
- return wrap
+ return decorator
def metatarget(f):
@@ -58,9 +197,9 @@ def qualify_jam_action(action_name, context_module):
ix = action_name.find('.')
if ix != -1 and action_name[:ix] == context_module:
return context_module + '%' + action_name[ix+1:]
-
- return context_module + '%' + action_name
-
+
+ return context_module + '%' + action_name
+
def set_jam_action(name, *args):
diff --git a/tools/build/src/util/indirect.py b/tools/build/src/util/indirect.py
index 78fa89946b..01c2e77c21 100644
--- a/tools/build/src/util/indirect.py
+++ b/tools/build/src/util/indirect.py
@@ -1,6 +1,6 @@
# Status: minimally ported. This module is not supposed to be used much
# with Boost.Build/Python.
-#
+#
# Copyright 2003 Dave Abrahams
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
diff --git a/tools/build/src/util/logger.py b/tools/build/src/util/logger.py
index de6521290a..8da0434ae9 100644
--- a/tools/build/src/util/logger.py
+++ b/tools/build/src/util/logger.py
@@ -7,25 +7,25 @@ import sys
class NullLogger:
def __init__ (self):
self.indent_ = ''
-
+
def log (self, source_name, *args):
if self.on () and self.interesting (source_name):
self.do_log (self.indent_)
for i in args:
self.do_log (i)
self.do_log ('\n')
-
+
def increase_indent (self):
if self.on ():
self.indent_ += ' '
-
+
def decrease_indent (self):
if self.on () and len (self.indent_) > 4:
self.indent_ = self.indent_ [-4:]
def do_log (self, *args):
pass
-
+
def interesting (self, source_name):
return False
@@ -35,10 +35,10 @@ class NullLogger:
class TextLogger (NullLogger):
def __init__ (self):
NullLogger.__init__ (self)
-
+
def do_log (self, arg):
sys.stdout.write (str (arg))
-
+
def interesting (self, source_name):
return True
diff --git a/tools/build/src/util/option.py b/tools/build/src/util/option.py
index 47d6abdff6..120c2a32c2 100644
--- a/tools/build/src/util/option.py
+++ b/tools/build/src/util/option.py
@@ -13,9 +13,9 @@ options = {}
# Set a value for a named option, to be used when not overridden on the command
# line.
def set(name, value=None):
-
+
global options
-
+
options[name] = value
def get(name, default_value=None, implied_value=None):
diff --git a/tools/build/src/util/order.py b/tools/build/src/util/order.py
index 4e67b3f1a1..de990b734b 100644
--- a/tools/build/src/util/order.py
+++ b/tools/build/src/util/order.py
@@ -9,26 +9,26 @@ class Order:
The primary use case is the gcc toolset, which is sensitive to
library order: if library 'a' uses symbols from library 'b',
then 'a' must be present before 'b' on the linker's command line.
-
+
This requirement can be lifted for gcc with GNU ld, but for gcc with
Solaris LD (and for Solaris toolset as well), the order always matters.
-
+
So, we need to store order requirements and then order libraries
according to them. It it not possible to use dependency graph as
order requirements. What we need is "use symbols" relationship
while dependency graph provides "needs to be updated" relationship.
-
+
For example::
lib a : a.cpp b;
lib b ;
-
+
For static linking, the 'a' library need not depend on 'b'. However, it
still should come before 'b' on the command line.
"""
def __init__ (self):
self.constraints_ = []
-
+
def add_pair (self, first, second):
""" Adds the constraint that 'first' should precede 'second'.
"""
@@ -37,7 +37,7 @@ class Order:
def order (self, objects):
""" Given a list of objects, reorder them so that the constains specified
by 'add_pair' are satisfied.
-
+
The algorithm was adopted from an awk script by Nikita Youshchenko
(yoush at cs dot msu dot su)
"""
@@ -46,11 +46,11 @@ class Order:
# rather removing edges.
result = []
- if not objects:
+ if not objects:
return result
constraints = self.__eliminate_unused_constraits (objects)
-
+
# Find some library that nobody depends upon and add it to
# the 'result' array.
obj = None
@@ -68,7 +68,7 @@ class Order:
new_objects.append (obj)
obj = None
objects = objects [1:]
-
+
if not obj:
raise BaseException ("Circular order dependencies")
@@ -82,7 +82,7 @@ class Order:
# Add the remaining objects for further processing
# on the next iteration
objects = new_objects
-
+
return result
def __eliminate_unused_constraits (self, objects):
@@ -96,9 +96,9 @@ class Order:
result.append (c)
return result
-
+
def __has_no_dependents (self, obj, constraints):
- """ Returns true if there's no constraint in 'constraints' where
+ """ Returns true if there's no constraint in 'constraints' where
'obj' comes second.
"""
failed = False
@@ -111,7 +111,7 @@ class Order:
constraints = constraints [1:]
return not failed
-
+
def __remove_satisfied (self, constraints, obj):
result = []
for c in constraints:
diff --git a/tools/build/src/util/path.py b/tools/build/src/util/path.py
index d602598c97..7b90320730 100644
--- a/tools/build/src/util/path.py
+++ b/tools/build/src/util/path.py
@@ -7,13 +7,13 @@
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
-# Performs various path manipulations. Path are always in a 'normilized'
+# Performs various path manipulations. Path are always in a 'normilized'
# representation. In it, a path may be either:
#
# - '.', or
#
# - ['/'] [ ( '..' '/' )* (token '/')* token ]
-#
+#
# In plain english, path can be rooted, '..' elements are allowed only
# at the beginning, and it never ends in slash, except for path consisting
# of slash only.
@@ -40,6 +40,7 @@ def make (native):
# TODO: make os selection here.
return make_UNIX (native)
+@bjam_signature([['native']])
def make_UNIX (native):
# VP: I have no idea now 'native' can be empty here! But it can!
@@ -60,7 +61,7 @@ def native_UNIX (path):
def pwd ():
""" Returns the current working directory.
- # TODO: is it a good idea to use the current dir? Some use-cases
+ # TODO: is it a good idea to use the current dir? Some use-cases
may not allow us to depend on the current dir.
"""
return make (os.getcwd ())
@@ -79,38 +80,38 @@ def is_rooted (path):
# # distribute this software is granted provided this copyright notice appears in
# # all copies. This software is provided "as is" without express or implied
# # warranty, and with no claim as to its suitability for any purpose.
-#
-# # Performs various path manipulations. Path are always in a 'normilized'
+#
+# # Performs various path manipulations. Path are always in a 'normilized'
# # representation. In it, a path may be either:
# #
# # - '.', or
# #
# # - ['/'] [ ( '..' '/' )* (token '/')* token ]
-# #
+# #
# # In plain english, path can be rooted, '..' elements are allowed only
# # at the beginning, and it never ends in slash, except for path consisting
# # of slash only.
-#
+#
# import modules ;
# import sequence ;
# import regex ;
# import errors : error ;
-#
-#
+#
+#
# os = [ modules.peek : OS ] ;
-# if [ modules.peek : UNIX ]
-# {
+# if [ modules.peek : UNIX ]
+# {
# local uname = [ modules.peek : JAMUNAME ] ;
# switch $(uname)
# {
# case CYGWIN* :
# os = CYGWIN ;
-#
+#
# case * :
# os = UNIX ;
-# }
+# }
# }
-#
+#
# #
# # Tests if a path is rooted.
# #
@@ -118,7 +119,7 @@ def is_rooted (path):
# {
# return [ MATCH "^(/)" : $(path) ] ;
# }
-#
+#
# #
# # Tests if a path has a parent.
# #
@@ -130,7 +131,7 @@ def is_rooted (path):
# return ;
# }
# }
-#
+#
# #
# # Returns the path without any directory components.
# #
@@ -138,22 +139,22 @@ def is_rooted (path):
# {
# return [ MATCH "([^/]+)$" : $(path) ] ;
# }
-#
+#
# #
# # Returns parent directory of the path. If no parent exists, error is issued.
# #
# rule parent ( path )
# {
# if [ has-parent $(path) ] {
-#
+#
# if $(path) = . {
# return .. ;
# } else {
-#
+#
# # Strip everything at the end of path up to and including
# # the last slash
# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
-#
+#
# # Did we strip what we shouldn't?
# if $(result[2]) = ".." {
# return $(path)/.. ;
@@ -172,7 +173,7 @@ def is_rooted (path):
# error "Path '$(path)' has no parent" ;
# }
# }
-#
+#
# #
# # Returns path2 such that "[ join path path2 ] = .".
# # The path may not contain ".." element or be rooted.
@@ -231,19 +232,19 @@ def reverse(path):
# {
# return [ NORMALIZE_PATH $(elements:J="/") ] ;
# }
-#
+#
# #
# # Contanenates the passed path elements. Generates an error if
# # any element other than the first one is rooted.
# #
# rule join ( elements + )
# {
-# if ! $(elements[2])
+# if ! $(elements[2])
# {
# return $(elements[1]) ;
# }
# else
-# {
+# {
# for local e in $(elements[2-])
# {
# if [ is-rooted $(e) ]
@@ -252,13 +253,13 @@ def reverse(path):
# }
# }
# return [ join-imp $(elements) ] ;
-# }
+# }
# }
def glob (dirs, patterns):
""" Returns the list of files matching the given pattern in the
- specified directory. Both directories and patterns are
+ specified directory. Both directories and patterns are
supplied as portable paths. Each pattern should be non-absolute
path, and can't contain "." or ".." elements. Each slash separated
element of pattern can contain the following special characters:
@@ -266,10 +267,10 @@ def glob (dirs, patterns):
- '*', which matches arbitrary number of characters.
A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
if and only if e1 matches p1, e2 matches p2 and so on.
-
- For example:
- [ glob . : *.cpp ]
- [ glob . : */build/Jamfile ]
+
+ For example:
+ [ glob . : *.cpp ]
+ [ glob . : */build/Jamfile ]
"""
# {
# local result ;
@@ -282,43 +283,43 @@ def glob (dirs, patterns):
# # First glob for directory part.
# local globbed-dirs = [ glob $(dirs) : $(p:D) ] ;
# result += [ glob $(globbed-dirs) : $(p:D="") ] ;
-# }
+# }
# }
# else
-# {
+# {
# # When a pattern has not directory, we glob directly.
# # Take care of special ".." value. The "GLOB" rule simply ignores
# # the ".." element (and ".") element in directory listings. This is
-# # needed so that
+# # needed so that
# #
# # [ glob libs/*/Jamfile ]
# #
-# # don't return
+# # don't return
# #
# # libs/../Jamfile (which is the same as ./Jamfile)
# #
# # On the other hand, when ".." is explicitly present in the pattern
# # we need to return it.
-# #
+# #
# for local dir in $(dirs)
# {
# for local p in $(patterns)
-# {
+# {
# if $(p) != ".."
-# {
-# result += [ sequence.transform make
+# {
+# result += [ sequence.transform make
# : [ GLOB [ native $(dir) ] : $(p) ] ] ;
-# }
+# }
# else
# {
# result += [ path.join $(dir) .. ] ;
-# }
-# }
+# }
+# }
# }
-# }
+# }
# return $(result) ;
# }
-#
+#
# TODO: (PF) I replaced the code above by this. I think it should work but needs to be tested.
result = []
@@ -335,7 +336,7 @@ def glob (dirs, patterns):
import glob
result.extend (glob.glob (p))
return result
-
+
#
# Find out the absolute name of path and returns the list of all the parents,
# starting with the immediate one. Parents are returned as relative names.
@@ -354,7 +355,7 @@ def all_parents(path, upper_limit=None, cwd=None):
result = []
while path_abs and path_abs != upper_limit:
(head, tail) = os.path.split(path)
- path = os.path.join(path, "..")
+ path = os.path.join(path, "..")
result.append(path)
path_abs = head
@@ -362,7 +363,7 @@ def all_parents(path, upper_limit=None, cwd=None):
raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path))
return result
-
+
# Search for 'pattern' in parent directories of 'dir', up till and including
# 'upper_limit', if it is specified, or till the filesystem root otherwise.
#
@@ -377,22 +378,22 @@ def glob_in_parents(dir, patterns, upper_limit=None):
return result
-#
+#
# #
# # Assuming 'child' is a subdirectory of 'parent', return the relative
# # path from 'parent' to 'child'
# #
# rule relative ( child parent )
# {
-# if $(parent) = "."
+# if $(parent) = "."
# {
# return $(child) ;
# }
-# else
-# {
+# else
+# {
# local split1 = [ regex.split $(parent) / ] ;
# local split2 = [ regex.split $(child) / ] ;
-#
+#
# while $(split1)
# {
# if $(split1[1]) = $(split2[1])
@@ -403,12 +404,12 @@ def glob_in_parents(dir, patterns, upper_limit=None):
# else
# {
# errors.error $(child) is not a subdir of $(parent) ;
-# }
-# }
-# return [ join $(split2) ] ;
-# }
+# }
+# }
+# return [ join $(split2) ] ;
+# }
# }
-#
+#
# # Returns the minimal path to path2 that is relative path1.
# #
# rule relative-to ( path1 path2 )
@@ -416,7 +417,7 @@ def glob_in_parents(dir, patterns, upper_limit=None):
# local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
# local split1 = [ regex.split $(path1) / ] ;
# local split2 = [ regex.split $(path2) / ] ;
-#
+#
# while $(split1) && $(root_1)
# {
# if $(split1[1]) = $(split2[1])
@@ -438,10 +439,10 @@ def glob_in_parents(dir, patterns, upper_limit=None):
def programs_path ():
raw = []
names = ['PATH', 'Path', 'path']
-
+
for name in names:
raw.append(os.environ.get (name, ''))
-
+
result = []
for elem in raw:
if elem:
@@ -458,40 +459,40 @@ def programs_path ():
# {
# local tokens = [ regex.split $(native) "[/\\]" ] ;
# local result ;
-#
+#
# # Handle paths ending with slashes
# if $(tokens[-1]) = ""
# {
# tokens = $(tokens[1--2]) ; # discard the empty element
# }
-#
+#
# result = [ path.join $(tokens) ] ;
-#
+#
# if [ regex.match "(^.:)" : $(native) ]
# {
# result = /$(result) ;
# }
-#
+#
# if $(native) = ""
# {
# result = "." ;
# }
-#
+#
# return $(result) ;
# }
-#
+#
# rule native-NT ( path )
# {
# local result = [ MATCH "^/?(.*)" : $(path) ] ;
# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
# return $(result) ;
# }
-#
+#
# rule make-CYGWIN ( path )
# {
# return [ make-NT $(path) ] ;
# }
-#
+#
# rule native-CYGWIN ( path )
# {
# local result = $(path) ;
@@ -501,7 +502,7 @@ def programs_path ():
# }
# return [ native-UNIX $(result) ] ;
# }
-#
+#
# #
# # split-VMS: splits input native path into
# # device dir file (each part is optional),
@@ -515,10 +516,10 @@ def programs_path ():
# local device = $(matches[1]) ;
# local dir = $(matches[2]) ;
# local file = $(matches[3]) ;
-#
+#
# return $(device) $(dir) $(file) ;
# }
-#
+#
# #
# # Converts a native VMS path into a portable path spec.
# #
@@ -535,13 +536,13 @@ def programs_path ():
# {
# errors.error "Can't handle default-device absolute paths: " $(native) ;
# }
-#
+#
# local parts = [ split-path-VMS $(native) ] ;
# local device = $(parts[1]) ;
# local dir = $(parts[2]) ;
# local file = $(parts[3]) ;
# local elems ;
-#
+#
# if $(device)
# {
# #
@@ -549,7 +550,7 @@ def programs_path ():
# #
# elems = /$(device) ;
# }
-#
+#
# if $(dir) = "[]"
# {
# #
@@ -561,7 +562,7 @@ def programs_path ():
# {
# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
# local dir_parts = [ regex.split $(dir) \\. ] ;
-#
+#
# if $(dir_parts[1]) = ""
# {
# #
@@ -569,15 +570,15 @@ def programs_path ():
# #
# dir_parts = $(dir_parts[2--1]) ;
# }
-#
+#
# #
# # replace "parent-directory" parts (- => ..)
# #
# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
-#
+#
# elems = $(elems) $(dir_parts) ;
# }
-#
+#
# if $(file)
# {
# if ! [ MATCH (\\.) : $(file) ]
@@ -589,12 +590,12 @@ def programs_path ():
# }
# elems = $(elems) $(file) ;
# }
-#
+#
# local portable = [ path.join $(elems) ] ;
-#
+#
# return $(portable) ;
# }
-#
+#
# #
# # Converts a portable path spec into a native VMS path.
# #
@@ -608,7 +609,7 @@ def programs_path ():
# local file = "" ;
# local native ;
# local split ;
-#
+#
# #
# # Has device ?
# #
@@ -618,7 +619,7 @@ def programs_path ():
# device = $(split[1]) ;
# dir = $(split[2]) ;
# }
-#
+#
# #
# # Has file ?
# #
@@ -631,13 +632,13 @@ def programs_path ():
# #
# split = [ regex.split $(dir) / ] ;
# local maybe_file = $(split[-1]) ;
-#
+#
# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
# {
# file = $(maybe_file) ;
# dir = [ sequence.join $(split[1--2]) : / ] ;
# }
-#
+#
# #
# # Has dir spec ?
# #
@@ -649,59 +650,59 @@ def programs_path ():
# {
# dir = [ regex.replace $(dir) \\.\\. - ] ;
# dir = [ regex.replace $(dir) / . ] ;
-#
+#
# if $(device) = ""
# {
# #
# # Relative directory
-# #
+# #
# dir = "."$(dir) ;
# }
# dir = "["$(dir)"]" ;
# }
-#
+#
# native = [ sequence.join $(device) $(dir) $(file) ] ;
-#
+#
# return $(native) ;
# }
-#
-#
+#
+#
# rule __test__ ( ) {
-#
+#
# import assert ;
# import errors : try catch ;
-#
+#
# assert.true is-rooted "/" ;
# assert.true is-rooted "/foo" ;
# assert.true is-rooted "/foo/bar" ;
# assert.result : is-rooted "." ;
# assert.result : is-rooted "foo" ;
# assert.result : is-rooted "foo/bar" ;
-#
+#
# assert.true has-parent "foo" ;
# assert.true has-parent "foo/bar" ;
# assert.true has-parent "." ;
# assert.result : has-parent "/" ;
-#
+#
# assert.result "." : basename "." ;
# assert.result ".." : basename ".." ;
# assert.result "foo" : basename "foo" ;
# assert.result "foo" : basename "bar/foo" ;
# assert.result "foo" : basename "gaz/bar/foo" ;
# assert.result "foo" : basename "/gaz/bar/foo" ;
-#
+#
# assert.result "." : parent "foo" ;
# assert.result "/" : parent "/foo" ;
# assert.result "foo/bar" : parent "foo/bar/giz" ;
# assert.result ".." : parent "." ;
# assert.result ".." : parent "../foo" ;
# assert.result "../../foo" : parent "../../foo/bar" ;
-#
-#
+#
+#
# assert.result "." : reverse "." ;
# assert.result ".." : reverse "foo" ;
# assert.result "../../.." : reverse "foo/bar/giz" ;
-#
+#
# assert.result "foo" : join "foo" ;
# assert.result "/foo" : join "/" "foo" ;
# assert.result "foo/bar" : join "foo" "bar" ;
@@ -714,57 +715,57 @@ def programs_path ():
# assert.result "foo/giz" : join "foo/giz" "." ;
# assert.result "." : join lib2 ".." ;
# assert.result "/" : join "/a" ".." ;
-#
+#
# assert.result /a/b : join /a/b/c .. ;
-#
+#
# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
# assert.result "giz" : join "foo" ".." "giz" ;
# assert.result "foo/giz" : join "foo" "." "giz" ;
-#
+#
# try ;
# {
# join "a" "/b" ;
# }
# catch only first element may be rooted ;
-#
+#
# local CWD = "/home/ghost/build" ;
# assert.result : all-parents . : . : $(CWD) ;
# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
-#
+#
# local CWD = "/home/ghost" ;
# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
-#
+#
# assert.result "c/d" : relative "a/b/c/d" "a/b" ;
# assert.result "foo" : relative "foo" "." ;
-#
+#
# local save-os = [ modules.peek path : os ] ;
# modules.poke path : os : NT ;
-#
+#
# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
# assert.result "foo" : make "foo/." ;
# assert.result "foo" : make "foo/bar/.." ;
# assert.result "/D:/My Documents" : make "D:\\My Documents" ;
# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
-#
+#
# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
# assert.result "foo" : native "foo" ;
# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
-#
+#
# modules.poke path : os : UNIX ;
-#
+#
# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
# assert.result "/sub1" : make "/sub1/." ;
-# assert.result "/sub1" : make "/sub1/sub2/.." ;
+# assert.result "/sub1" : make "/sub1/sub2/.." ;
# assert.result "sub1" : make "sub1/." ;
# assert.result "sub1" : make "sub1/sub2/.." ;
# assert.result "/foo/bar" : native "/foo/bar" ;
-#
+#
# modules.poke path : os : VMS ;
-#
+#
# #
# # Don't really need to poke os before these
# #
@@ -776,7 +777,7 @@ def programs_path ():
# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
# assert.result "" "" "file" : split-path-VMS "file" ;
# assert.result "" "" "" : split-path-VMS "" ;
-#
+#
# #
# # Special case: current directory
# #
@@ -784,7 +785,7 @@ def programs_path ():
# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
# assert.result "" "[]" "file" : split-path-VMS "[]file" ;
# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
-#
+#
# #
# # Make portable paths
# #
@@ -799,7 +800,7 @@ def programs_path ():
# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
-#
+#
# #
# # Special case (adds '.' to end of file w/o extension to
# # disambiguate from directory in portable path spec).
@@ -807,7 +808,7 @@ def programs_path ():
# assert.result "Jamfile." : make "Jamfile" ;
# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
-#
+#
# #
# # Make native paths
# #
@@ -821,9 +822,9 @@ def programs_path ():
# assert.result "giz.h" : native "giz.h" ;
# assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
-#
+#
# modules.poke path : os : $(save-os) ;
-#
+#
# }
#
@@ -837,7 +838,7 @@ def programs_path ():
def glob(dirs, patterns, exclude_patterns=None):
"""Returns the list of files matching the given pattern in the
- specified directory. Both directories and patterns are
+ specified directory. Both directories and patterns are
supplied as portable paths. Each pattern should be non-absolute
path, and can't contain '.' or '..' elements. Each slash separated
element of pattern can contain the following special characters:
@@ -845,8 +846,8 @@ def glob(dirs, patterns, exclude_patterns=None):
- '*', which matches arbitrary number of characters.
A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
if and only if e1 matches p1, e2 matches p2 and so on.
- For example:
- [ glob . : *.cpp ]
+ For example:
+ [ glob . : *.cpp ]
[ glob . : */build/Jamfile ]
"""
@@ -858,7 +859,7 @@ def glob(dirs, patterns, exclude_patterns=None):
else:
assert(isinstance(exclude_patterns, list))
- real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
+ real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns
for d in dirs]
@@ -883,14 +884,14 @@ def glob_tree(roots, patterns, exclude_patterns=None):
subdirs = [s for s in glob(roots, ["*"]) if s != "." and s != ".." and os.path.isdir(s)]
if subdirs:
result.extend(glob_tree(subdirs, patterns, exclude_patterns))
-
+
return result
def glob_in_parents(dir, patterns, upper_limit=None):
"""Recursive version of GLOB which glob sall parent directories
of dir until the first match is found. Returns an empty result if no match
- is found"""
-
+ is found"""
+
assert(isinstance(dir, str))
assert(isinstance(patterns, list))
diff --git a/tools/build/src/util/sequence.py b/tools/build/src/util/sequence.py
index 1d32efd2e3..b5dddbaded 100644
--- a/tools/build/src/util/sequence.py
+++ b/tools/build/src/util/sequence.py
@@ -5,7 +5,11 @@
import operator
+from b2.util import is_iterable
+
+
def unique (values, stable=False):
+ assert is_iterable(values)
if stable:
s = set()
r = []
@@ -21,6 +25,8 @@ def max_element (elements, ordered = None):
""" Returns the maximum number in 'elements'. Uses 'ordered' for comparisons,
or '<' is none is provided.
"""
+ assert is_iterable(elements)
+ assert callable(ordered) or ordered is None
if not ordered: ordered = operator.lt
max = elements [0]
@@ -34,6 +40,8 @@ def select_highest_ranked (elements, ranks):
""" Returns all of 'elements' for which corresponding element in parallel
list 'rank' is equal to the maximum value in 'rank'.
"""
+ assert is_iterable(elements)
+ assert is_iterable(ranks)
if not elements:
return []
diff --git a/tools/build/src/util/set.py b/tools/build/src/util/set.py
index dc7cf32822..f2239a021d 100644
--- a/tools/build/src/util/set.py
+++ b/tools/build/src/util/set.py
@@ -3,11 +3,15 @@
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
-from utility import to_seq
+from b2.util import is_iterable
+from .utility import to_seq
+
def difference (b, a):
""" Returns the elements of B that are not in A.
"""
+ assert is_iterable(b)
+ assert is_iterable(a)
result = []
for element in b:
if not element in a:
@@ -18,6 +22,8 @@ def difference (b, a):
def intersection (set1, set2):
""" Removes from set1 any items which don't appear in set2 and returns the result.
"""
+ assert is_iterable(set1)
+ assert is_iterable(set2)
result = []
for v in set1:
if v in set2:
@@ -39,4 +45,6 @@ def equal (a, b):
""" Returns True iff 'a' contains the same elements as 'b', irrespective of their order.
# TODO: Python 2.4 has a proper set class.
"""
+ assert is_iterable(a)
+ assert is_iterable(b)
return contains (a, b) and contains (b, a)
diff --git a/tools/build/src/util/utility.py b/tools/build/src/util/utility.py
index afea765b97..162a57be4f 100644
--- a/tools/build/src/util/utility.py
+++ b/tools/build/src/util/utility.py
@@ -11,6 +11,7 @@ import re
import os
import bjam
from b2.exceptions import *
+from b2.util import is_iterable_typed
__re_grist_and_value = re.compile (r'(<[^>]*>)(.*)')
__re_grist_content = re.compile ('^<(.*)>$')
@@ -40,13 +41,13 @@ def add_grist (features):
features: one string or a sequence of strings
return: the gristed string, if features is a string, or a sequence of gristed strings, if features is a sequence
"""
-
+ assert is_iterable_typed(features, basestring) or isinstance(features, basestring)
def grist_one (feature):
if feature [0] != '<' and feature [len (feature) - 1] != '>':
return '<' + feature + '>'
else:
return feature
-
+
if isinstance (features, str):
return grist_one (features)
else:
@@ -56,6 +57,8 @@ def replace_grist (features, new_grist):
""" Replaces the grist of a string by a new one.
Returns the string with the new grist.
"""
+ assert is_iterable_typed(features, basestring) or isinstance(features, basestring)
+ assert isinstance(new_grist, basestring)
def replace_grist_one (name, new_grist):
split = __re_grist_and_value.match (name)
if not split:
@@ -71,12 +74,14 @@ def replace_grist (features, new_grist):
def get_value (property):
""" Gets the value of a property, that is, the part following the grist, if any.
"""
+ assert is_iterable_typed(property, basestring) or isinstance(property, basestring)
return replace_grist (property, '')
-
+
def get_grist (value):
""" Returns the grist of a string.
If value is a sequence, does it for every value and returns the result as a sequence.
"""
+ assert is_iterable_typed(value, basestring) or isinstance(value, basestring)
def get_grist_one (name):
split = __re_grist_and_value.match (name)
if not split:
@@ -90,9 +95,10 @@ def get_grist (value):
return [ get_grist_one (v) for v in value ]
def ungrist (value):
- """ Returns the value without grist.
+ """ Returns the value without grist.
If value is a sequence, does it for every value and returns the result as a sequence.
"""
+ assert is_iterable_typed(value, basestring) or isinstance(value, basestring)
def ungrist_one (value):
stripped = __re_grist_content.match (value)
if not stripped:
@@ -109,12 +115,15 @@ def replace_suffix (name, new_suffix):
""" Replaces the suffix of name by new_suffix.
If no suffix exists, the new one is added.
"""
+ assert isinstance(name, basestring)
+ assert isinstance(new_suffix, basestring)
split = os.path.splitext (name)
return split [0] + new_suffix
def forward_slashes (s):
""" Converts all backslashes to forward slashes.
"""
+ assert isinstance(s, basestring)
return __re_backslash.sub ('/', s)
@@ -122,6 +131,7 @@ def split_action_id (id):
""" Splits an id in the toolset and specific rule parts. E.g.
'gcc.compile.c++' returns ('gcc', 'compile.c++')
"""
+ assert isinstance(id, basestring)
split = id.split ('.', 1)
toolset = split [0]
name = ''
@@ -136,7 +146,7 @@ def os_name ():
def platform ():
return bjam.variable("OSPLAT")
-
+
def os_version ():
return bjam.variable("OSVER")
diff --git a/tools/build/test/alias.py b/tools/build/test/alias.py
index 4ff4d74d50..7ac4c61099 100644
--- a/tools/build/test/alias.py
+++ b/tools/build/test/alias.py
@@ -67,7 +67,7 @@ def test_alias_source_usage_requirements(t):
Check whether usage requirements are propagated via "alias". In case they
are not, linking will fail as there will be no main() function defined
anywhere in the source.
-
+
"""
t.write("jamroot.jam", """\
lib l : l.cpp : : : <define>WANT_MAIN ;
diff --git a/tools/build/test/cli_property_expansion.py b/tools/build/test/cli_property_expansion.py
new file mode 100644
index 0000000000..24c8216170
--- /dev/null
+++ b/tools/build/test/cli_property_expansion.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python
+
+# Copyright 2015 Aaron Boman
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that free property inside.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "")
+t.write(
+ "subdir/build.jam",
+ """
+ import feature ;
+ feature.feature my-feature : : free ;
+ """
+)
+t.write(
+ "subdir/subsubdir/build.jam",
+ """
+ exe hello : hello.c ;
+ """
+)
+t.write(
+ "subdir/subsubdir/hello.c",
+ r"""
+ #include <stdio.h>
+
+ int main(int argc, char **argv){
+ printf("%s\n", "Hello, World!");
+ }
+ """
+)
+
+# run from the root directory
+t.run_build_system(['subdir/subsubdir', 'my-feature="some value"'])
+
+t.cleanup()
diff --git a/tools/build/test/composite.py b/tools/build/test/composite.py
index 064c4087dd..a35b88d1a9 100644
--- a/tools/build/test/composite.py
+++ b/tools/build/test/composite.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Test that composite properties are handled correctly.
diff --git a/tools/build/test/core_bindrule.py b/tools/build/test/core_bindrule.py
index 3a6916afa7..6ae4ab34c4 100755
--- a/tools/build/test/core_bindrule.py
+++ b/tools/build/test/core_bindrule.py
@@ -1,9 +1,9 @@
#!/usr/bin/python
-# Copyright 2001 Dave Abrahams
+# Copyright 2001 Dave Abrahams
# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
import os
diff --git a/tools/build/test/core_dependencies.py b/tools/build/test/core_dependencies.py
index 2b2ef368d6..cf9873cb47 100644
--- a/tools/build/test/core_dependencies.py
+++ b/tools/build/test/core_dependencies.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This tests correct handling of dependencies, specifically, on generated
# sources, and from generated sources.
@@ -30,7 +30,7 @@ DEPENDS a : b ;
actions create-b
{
- echo '#include <foo.h>' > $(<)
+ echo '#include <foo.h>' > $(<)
}
copy a : b ;
create-b b ;
@@ -84,7 +84,7 @@ t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : foo.h ; " + code)
t.fail_test(not correct_order(t.stdout()))
# Now foo.h exists. Test include from b -> foo.h -> bar.h -> biz.h. b and foo.h
-# already have updating actions.
+# already have updating actions.
t.rm(["a", "b"])
t.write("foo.h", "#include <bar.h>")
t.write("bar.h", "#include <biz.h>")
@@ -107,14 +107,14 @@ t.fail_test(not correct_order(t.stdout()))
t.rm(["a", "biz.h"])
t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : biz.h ; " + code)
-t.fail_test(not correct_order(t.stdout()))
+t.fail_test(not correct_order(t.stdout()))
t.write("a", "")
code="""
DEPENDS all : main d ;
-actions copy
+actions copy
{
cp $(>) $(<) ;
}
@@ -127,9 +127,9 @@ INCLUDES a : <1>c ;
NOCARE <1>c ;
SEARCH on <1>c = . ;
-actions create-c
+actions create-c
{
- echo d > $(<)
+ echo d > $(<)
}
actions create-d
@@ -144,7 +144,7 @@ create-d d ;
HDRSCAN on <1>c = (.*) ;
HDRRULE on <1>c = hdrrule ;
-rule hdrrule
+rule hdrrule
{
INCLUDES $(1) : d ;
}
diff --git a/tools/build/test/core_language.py b/tools/build/test/core_language.py
index 717e91adaa..88a6d1934c 100755
--- a/tools/build/test/core_language.py
+++ b/tools/build/test/core_language.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
diff --git a/tools/build/test/custom_generator.py b/tools/build/test/custom_generator.py
index 9a1188a03d..8c477a6f2d 100644
--- a/tools/build/test/custom_generator.py
+++ b/tools/build/test/custom_generator.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Attempt to declare a generator for creating OBJ from RC files. That generator
# should be considered together with standard CPP->OBJ generators and
@@ -14,11 +14,11 @@ import BoostBuild
t = BoostBuild.Tester()
-t.write("jamroot.jam", """
-import rcc ;
+t.write("jamroot.jam", """
+import rcc ;
""")
-t.write("rcc.jam", """
+t.write("rcc.jam", """
import type ;
import generators ;
import print ;
@@ -53,11 +53,11 @@ get_manager().engine().register_action(
'@($(STDOUT):E=rc-object) > "$(<)"')
""")
-t.write("jamfile.jam", """
-obj r : r.rcc ;
+t.write("jamfile.jam", """
+obj r : r.rcc ;
""")
-t.write("r.rcc", """
+t.write("r.rcc", """
""")
t.run_build_system()
diff --git a/tools/build/test/ordered_include.py b/tools/build/test/ordered_include.py
index f91f81fe8a..72ab0d3d75 100644
--- a/tools/build/test/ordered_include.py
+++ b/tools/build/test/ordered_include.py
@@ -95,22 +95,22 @@ def test_basic():
#include <test2.hpp>
int main() {}
""")
-
+
tester.write("a/test1.hpp", """
""")
-
+
tester.write("b/test2.hpp", """
""")
-
+
tester.run_build_system()
-
+
tester.expect_addition("bin/$toolset/debug/test.obj")
-
+
# Check that the dependencies are correct
tester.touch("a/test1.hpp")
tester.run_build_system()
tester.expect_touch("bin/$toolset/debug/test.obj")
-
+
tester.touch("b/test2.hpp")
tester.run_build_system()
tester.expect_touch("bin/$toolset/debug/test.obj")
diff --git a/tools/build/test/print.py b/tools/build/test/print.py
index 65caf95e19..6579bce547 100644
--- a/tools/build/test/print.py
+++ b/tools/build/test/print.py
@@ -1,9 +1,9 @@
#!/usr/bin/python
-# Copyright 2003 Douglas Gregor
-# Copyright 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Douglas Gregor
+# Copyright 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
diff --git a/tools/build/test/qt4.py b/tools/build/test/qt4.py
index abb9594d58..170f6079b7 100755
--- a/tools/build/test/qt4.py
+++ b/tools/build/test/qt4.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
# (c) Copyright Juergen Hunold 2008
-# Use, modification, and distribution are subject to the
-# Boost Software License, Version 1.0. (See accompanying file
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
diff --git a/tools/build/test/qt5.py b/tools/build/test/qt5.py
index 75c4e670f9..d9e1226e80 100755
--- a/tools/build/test/qt5.py
+++ b/tools/build/test/qt5.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
# (c) Copyright Juergen Hunold 2012
-# Use, modification, and distribution are subject to the
-# Boost Software License, Version 1.0. (See accompanying file
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
diff --git a/tools/build/test/relative_sources.py b/tools/build/test/relative_sources.py
index bd4620fc60..f36e0b097f 100644
--- a/tools/build/test/relative_sources.py
+++ b/tools/build/test/relative_sources.py
@@ -17,7 +17,7 @@ t.write("src/a.cpp", "int main() {}\n")
t.run_build_system()
t.expect_addition("bin/$toolset/debug/src/a.obj")
-
+
# Test that the relative path to source is preserved
# when using 'glob'.
t.rm("bin")
diff --git a/tools/build/test/test1.py b/tools/build/test/test1.py
index 05b3966483..79d142221c 100644
--- a/tools/build/test/test1.py
+++ b/tools/build/test/test1.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
diff --git a/tools/build/test/testing_support.py b/tools/build/test/testing_support.py
index 01a7c48268..ad25b4aad2 100755
--- a/tools/build/test/testing_support.py
+++ b/tools/build/test/testing_support.py
@@ -26,7 +26,7 @@ def test_files_with_spaces_in_their_name():
t.write("valid source.cpp", "int main() {}\n");
t.write("invalid source.cpp", "this is not valid source code");
-
+
t.write("jamroot.jam", """
import testing ;
testing.compile "valid source.cpp" ;